From: Anna Likholat Date: Tue, 18 Aug 2020 05:38:48 +0000 (+0300) Subject: Java bindings (#506) X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=812cd1076b5d84d8037e11a4ef0656cdb587d5b5;p=platform%2Fupstream%2Fdldt.git Java bindings (#506) * java bindings added * build samples alongside the IE build * fixes * fixes * fixed comments * fixes --- diff --git a/inference-engine/CMakeLists.txt b/inference-engine/CMakeLists.txt index 5394e7e..0657f27 100644 --- a/inference-engine/CMakeLists.txt +++ b/inference-engine/CMakeLists.txt @@ -102,6 +102,10 @@ endif() add_subdirectory(ie_bridges/c) +if (ENABLE_JAVA) + add_subdirectory(ie_bridges/java) +endif() + add_cpplint_report_target() # diff --git a/inference-engine/cmake/features_ie.cmake b/inference-engine/cmake/features_ie.cmake index 102130f..7db7fea 100644 --- a/inference-engine/cmake/features_ie.cmake +++ b/inference-engine/cmake/features_ie.cmake @@ -91,6 +91,8 @@ ie_option (ENABLE_OPENCV "enables OpenCV" ON) ie_option (ENABLE_PYTHON "enables ie python bridge build" OFF) +ie_option (ENABLE_JAVA "enables ie java bridge build" OFF) + ie_dependent_option(ENABLE_CPPLINT "Enable cpplint checks during the build" ON "UNIX;NOT ANDROID" OFF) ie_dependent_option(ENABLE_CPPLINT_REPORT "Build cpplint report instead of failing the build" OFF "ENABLE_CPPLINT" OFF) diff --git a/inference-engine/ie_bridges/java/CMakeLists.txt b/inference-engine/ie_bridges/java/CMakeLists.txt new file mode 100644 index 0000000..5c9c66f --- /dev/null +++ b/inference-engine/ie_bridges/java/CMakeLists.txt @@ -0,0 +1,63 @@ +# Copyright (C) 2020 Intel Corporation + +find_package(Java REQUIRED) +include(UseJava) + +set(JAVA_AWT_INCLUDE_PATH NotNeeded) + +project(inference_engine_java_api) + +set(InferenceEngine_LIBRARIES inference_engine) + +find_package(JNI REQUIRED) + +file(GLOB_RECURSE sources cpp/*.cpp) +add_library(${PROJECT_NAME} SHARED ${sources}) + +file(GLOB_RECURSE java_source org/intel/openvino/*.java) +add_jar(inference_engine_jar 
${java_source} + OUTPUT_NAME ${PROJECT_NAME} + OUTPUT_DIR ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}) + +target_link_libraries(${PROJECT_NAME} PRIVATE inference_engine) +target_include_directories(${PROJECT_NAME} PRIVATE ${JNI_INCLUDE_DIRS}) + +if(ENABLE_TESTS) + file(DOWNLOAD https://search.maven.org/remotecontent?filepath=junit/junit/4.13/junit-4.13.jar + ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/junit-4.13.jar) + file(DOWNLOAD https://search.maven.org/remotecontent?filepath=org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar + ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/hamcrest-core-1.3.jar) + + file(GLOB_RECURSE java_tests_src tests/*.java) + add_jar(ie_java_api_tests_jar ${java_tests_src} + OUTPUT_NAME ie_java_api_tests + OUTPUT_DIR ${CMAKE_LIBRARY_OUTPUT_DIRECTORY} + INCLUDE_JARS ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/*) +endif() + +if(ENABLE_SAMPLES) + find_package(OpenCV) + if(OpenCV_FOUND) + if(EXISTS "${OpenCV_INSTALL_PATH}/share/java") + file(GLOB_RECURSE java_opencv_src ${OpenCV_INSTALL_PATH}/share/java/*.jar) + elseif(EXISTS "${OpenCV_INSTALL_PATH}/bin") + file(GLOB java_opencv_src ${OpenCV_INSTALL_PATH}/bin/*.jar) + endif() + if(EXISTS "${java_opencv_src}") + file(GLOB java_samples samples/*) + foreach(sample ${java_samples}) + if(IS_DIRECTORY ${sample}) + get_filename_component(sample_name "${sample}" NAME) + file(GLOB_RECURSE sample_src ${sample}/*.java) + add_jar("${sample_name}_jar" + SOURCES ${sample_src} samples/ArgumentParser.java + OUTPUT_NAME ${sample_name} + OUTPUT_DIR ${CMAKE_LIBRARY_OUTPUT_DIRECTORY} + INCLUDE_JARS ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/* ${java_opencv_src}) + endif() + endforeach() + else() + message(WARNING ".jar file wasn't found for OpenCV. 
Java samples won't be build.") + endif() + endif() +endif() diff --git a/inference-engine/ie_bridges/java/README.md b/inference-engine/ie_bridges/java/README.md new file mode 100644 index 0000000..ad7935c --- /dev/null +++ b/inference-engine/ie_bridges/java/README.md @@ -0,0 +1,19 @@ +## Software Requirements +- openjdk 11 + +### Linux +To install openjdk: + +* Ubuntu 18.04 +```bash +sudo apt-get install -y openjdk-11-jdk +``` + +* Ubuntu 16.04 +```bash +sudo apt-get install -y openjdk-9-jdk +``` + +## Building on Linux + +To create Inference Engine Java API add ```-DENABLE_JAVA=ON``` flag in cmake command while building the Inference Engine. diff --git a/inference-engine/ie_bridges/java/cpp/blob.cpp b/inference-engine/ie_bridges/java/cpp/blob.cpp new file mode 100644 index 0000000..aa84f6f --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/blob.cpp @@ -0,0 +1,229 @@ +#include + +#include "openvino_java.hpp" +#include "jni_common.hpp" + +using namespace InferenceEngine; + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_GetTensorDesc(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "GetTensorDesc"; + try + { + Blob::Ptr *output = reinterpret_cast(addr); + TensorDesc *tDesc = new TensorDesc((*output)->getTensorDesc()); + + return (jlong)tDesc; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_GetBlob(JNIEnv *env, jobject obj, jlong tensorDescAddr) +{ + static const char method_name[] = "GetBlob"; + try + { + TensorDesc *tDesc = (TensorDesc *)tensorDescAddr; + + Blob::Ptr *blob = new Blob::Ptr(); + *blob = make_shared_blob(*tDesc); + + return (jlong)blob; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_BlobByte(JNIEnv *env, jobject obj, jlong tensorDescAddr, jbyteArray data) +{ + static const char method_name[] = "BlobByte"; + try + { + TensorDesc *tDesc = (TensorDesc *)tensorDescAddr; + + Blob::Ptr *blob = new Blob::Ptr(); + + *blob = make_shared_blob((*tDesc)); + (*blob)->allocate(); + env->GetByteArrayRegion(data, 0, (*blob)->size(), (*blob)->buffer()); + + return (jlong)blob; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_BlobFloat(JNIEnv *env, jobject obj, jlong tensorDescAddr, jfloatArray data) +{ + static const char method_name[] = "BlobFloat"; + try + { + TensorDesc *tDesc = (TensorDesc *)tensorDescAddr; + + Blob::Ptr *blob = new Blob::Ptr(); + + *blob = make_shared_blob((*tDesc)); + (*blob)->allocate(); + env->GetFloatArrayRegion(data, 0, (*blob)->size(), (*blob)->buffer()); + + return (jlong)blob; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_BlobCArray(JNIEnv *env, jobject obj, jlong tensorDescAddr, jlong matDataAddr) +{ + static const char method_name[] = "BlobCArray"; + try + { + TensorDesc *tDesc = (TensorDesc *)tensorDescAddr; + + auto precision = tDesc->getPrecision(); + + std::vector dims = tDesc->getDims(); + Blob::Ptr *blob = new Blob::Ptr(); + + switch (precision) { + case Precision::FP32: + { + float *data = (float *) matDataAddr; + *blob = make_shared_blob((*tDesc), data); + break; + } + case Precision::Q78: + case Precision::I16: + case Precision::FP16: + { + short *data = (short *) matDataAddr; + *blob = make_shared_blob((*tDesc), data); + break; + } + case Precision::U8: + { + uint8_t *data = (uint8_t *) matDataAddr; + *blob = make_shared_blob((*tDesc), data); + break; + } + case Precision::I8: + { + int8_t *data = (int8_t *) matDataAddr; + *blob = make_shared_blob((*tDesc), data); + break; + } + case Precision::I32: + { + int32_t *data = (int32_t *) matDataAddr; + *blob = make_shared_blob((*tDesc), data); + break; + } + case Precision::BF16: + { + short *data = (short *) matDataAddr; + *blob = make_shared_blob((*tDesc), data); + break; + } + default: + throw std::runtime_error("Unsupported precision value!"); + } + + return (jlong)blob; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + + return 0; +} + +JNIEXPORT jint JNICALL Java_org_intel_openvino_Blob_size(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "size"; + try + { + Blob::Ptr *output = reinterpret_cast(addr); + int size = (*output)->size(); + + return size; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_rmap(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "rmap"; + try + { + Blob::Ptr *output = reinterpret_cast(addr); + + if ((*output)->is()) { + LockedMemory *lmem = new LockedMemory (as(*output)->rmap()); + return (jlong)lmem; + } else { + throw std::runtime_error("Target Blob cannot be cast to the MemoryBlob!"); + } + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_Blob_delete(JNIEnv *, jobject, jlong addr) +{ + Blob::Ptr *output = reinterpret_cast(addr); + delete output; +} diff --git a/inference-engine/ie_bridges/java/cpp/cnn_network.cpp b/inference-engine/ie_bridges/java/cpp/cnn_network.cpp new file mode 100644 index 0000000..97341ae --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/cnn_network.cpp @@ -0,0 +1,223 @@ +#include + +#include "openvino_java.hpp" +#include "jni_common.hpp" + +using namespace InferenceEngine; + +JNIEXPORT jstring JNICALL Java_org_intel_openvino_CNNNetwork_getName(JNIEnv *env, jobject, jlong addr) +{ + static const char method_name[] = "getName"; + try + { + CNNNetwork *network = (CNNNetwork *)addr; + return env->NewStringUTF(network->getName().c_str()); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jint JNICALL Java_org_intel_openvino_CNNNetwork_getBatchSize(JNIEnv *env, jobject, jlong addr) +{ + static const char method_name[] = "getBatchSize"; + try + { + CNNNetwork *network = (CNNNetwork *)addr; + return static_cast(network->getBatchSize()); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jobject JNICALL Java_org_intel_openvino_CNNNetwork_GetInputsInfo(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "GetInputsInfo"; + try + { + CNNNetwork *network = (CNNNetwork *)addr; + const InputsDataMap &inputs_map = network->getInputsInfo(); + + jclass hashMapClass = env->FindClass("java/util/HashMap"); + jmethodID hashMapInit = env->GetMethodID(hashMapClass, "", "()V"); + jobject hashMapObj = env->NewObject(hashMapClass, hashMapInit); + jmethodID hashMapPut = env->GetMethodID(hashMapClass, "put", + "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;"); + + jclass inputInfoClass = env->FindClass("org/intel/openvino/InputInfo"); + jmethodID inputInfoConstructor = env->GetMethodID(inputInfoClass,"","(J)V"); + + for (const auto &item : inputs_map) { + jobject inputInfoObj = env->NewObject(inputInfoClass, inputInfoConstructor, (jlong)(item.second.get())); + env->CallObjectMethod(hashMapObj, hashMapPut, env->NewStringUTF(item.first.c_str()), inputInfoObj); + } + + env->PopLocalFrame(hashMapObj); + + return hashMapObj; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jobject JNICALL Java_org_intel_openvino_CNNNetwork_GetOutputsInfo(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "GetOutputsInfo"; + try + { + CNNNetwork *network = (CNNNetwork *)addr; + const OutputsDataMap &outputs_map = network->getOutputsInfo(); + + jclass hashMapClass = env->FindClass("java/util/HashMap"); + jmethodID hashMapInit = env->GetMethodID(hashMapClass, "", "()V"); + jobject hashMapObj = env->NewObject(hashMapClass, hashMapInit); + jmethodID hashMapPut = env->GetMethodID(hashMapClass, "put", + "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;"); + + jclass outputInfoClass = env->FindClass("org/intel/openvino/Data"); + jmethodID outputInfoConstructor = env->GetMethodID(outputInfoClass,"","(J)V"); + + for (const auto &item : outputs_map) { + jobject outputInfoObj = env->NewObject(outputInfoClass, outputInfoConstructor, (jlong)(item.second.get())); + env->CallObjectMethod(hashMapObj, hashMapPut, env->NewStringUTF(item.first.c_str()), outputInfoObj); + } + + env->PopLocalFrame(hashMapObj); + + return hashMapObj; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_CNNNetwork_reshape(JNIEnv *env, jobject obj, jlong addr, jobject input) +{ + static const char method_name[] = "reshape"; + try + { + CNNNetwork *network = (CNNNetwork *)addr; + network->reshape(javaMapToMap_1(env, input)); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT jobject JNICALL Java_org_intel_openvino_CNNNetwork_getInputShapes(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "getInputShapes"; + try + { + CNNNetwork *network = (CNNNetwork *)addr; + std::map> shapes = network->getInputShapes(); + + jclass hashMapClass = env->FindClass("java/util/HashMap"); + jmethodID hashMapInit = env->GetMethodID(hashMapClass, "", "()V"); + jobject hashMapObj = env->NewObject(hashMapClass, hashMapInit); + jmethodID hashMapPut = env->GetMethodID(hashMapClass, "put", + "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;"); + + for (const auto &item : shapes) { + jintArray result = env->NewIntArray(item.second.size()); + + jint *arr = env->GetIntArrayElements(result, nullptr); + for (int i = 0; i < item.second.size(); ++i) + arr[i] = item.second[i]; + + env->ReleaseIntArrayElements(result, arr, 0); + env->CallObjectMethod(hashMapObj, hashMapPut, env->NewStringUTF(item.first.c_str()), result); + } + + env->PopLocalFrame(hashMapObj); + + return hashMapObj; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_CNNNetwork_addOutput(JNIEnv *env, jobject obj, jlong addr, jstring layerName, jint outputIndex){ + static const char method_name[] = "addOutput"; + try + { + CNNNetwork *network = (CNNNetwork *)addr; + + std::string c_outputName = jstringToString(env, layerName); + size_t c_outputIndex = static_cast(outputIndex); + + network->addOutput(c_outputName, c_outputIndex); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_CNNNetwork_addOutput1(JNIEnv *env, jobject obj, jlong addr, jstring layerName){ + static const char method_name[] = "addOutput"; + try + { + CNNNetwork *network = (CNNNetwork *)addr; + + std::string c_outputName = jstringToString(env, layerName); + + network->addOutput(c_outputName); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_CNNNetwork_delete(JNIEnv *env, jobject obj, jlong addr) +{ + CNNNetwork *network = (CNNNetwork *)addr; + delete network; +} diff --git a/inference-engine/ie_bridges/java/cpp/enum_mapping.hpp b/inference-engine/ie_bridges/java/cpp/enum_mapping.hpp new file mode 100644 index 0000000..9f97c3d --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/enum_mapping.hpp @@ -0,0 +1,85 @@ +#include +#include "inference_engine.hpp" + +// +// resize_algorithm +// +static const std::map resize_alg_map = { + {0, InferenceEngine::ResizeAlgorithm::NO_RESIZE}, + {1, InferenceEngine::ResizeAlgorithm::RESIZE_AREA}, + {2, InferenceEngine::ResizeAlgorithm::RESIZE_BILINEAR} +}; + +// +// layout +// +static const std::map layout_map = { + {0, InferenceEngine::Layout::ANY}, + {1, InferenceEngine::Layout::NCHW}, + {2, InferenceEngine::Layout::NHWC}, + {3, InferenceEngine::Layout::NCDHW}, + {4, InferenceEngine::Layout::NDHWC}, + {64, InferenceEngine::Layout::OIHW}, + {95, InferenceEngine::Layout::SCALAR}, + {96, InferenceEngine::Layout::C}, + {128, InferenceEngine::Layout::CHW}, + {192, InferenceEngine::Layout::HW}, + {193, InferenceEngine::Layout::NC}, + {194, InferenceEngine::Layout::CN}, + {200, InferenceEngine::Layout::BLOCKED} +}; + +// +// precision +// +static const std::map precision_map = { + {255, InferenceEngine::Precision::UNSPECIFIED}, + {0, InferenceEngine::Precision::MIXED}, + {10, 
InferenceEngine::Precision::FP32}, + {11, InferenceEngine::Precision::FP16}, + {20, InferenceEngine::Precision::Q78}, + {30, InferenceEngine::Precision::I16}, + {40, InferenceEngine::Precision::U8}, + {50, InferenceEngine::Precision::I8}, + {60, InferenceEngine::Precision::U16}, + {70, InferenceEngine::Precision::I32}, + {72, InferenceEngine::Precision::I64}, + {71, InferenceEngine::Precision::BIN}, + {80, InferenceEngine::Precision::CUSTOM} +}; + +// +// wait_mode +// +static const std::map wait_mode_map = { + {-1, InferenceEngine::IInferRequest::WaitMode::RESULT_READY}, + {0, InferenceEngine::IInferRequest::WaitMode::STATUS_ONLY} +}; + +// +// status_code +// +static const std::map status_code_map = { + {InferenceEngine::StatusCode::OK, 0}, + {InferenceEngine::StatusCode::GENERAL_ERROR, -1}, + {InferenceEngine::StatusCode::NOT_IMPLEMENTED, -2}, + {InferenceEngine::StatusCode::NETWORK_NOT_LOADED, -3}, + {InferenceEngine::StatusCode::PARAMETER_MISMATCH, -4}, + {InferenceEngine::StatusCode::NOT_FOUND, -5}, + {InferenceEngine::StatusCode::OUT_OF_BOUNDS, -6}, + {InferenceEngine::StatusCode::UNEXPECTED, -7}, + {InferenceEngine::StatusCode::REQUEST_BUSY, -8}, + {InferenceEngine::StatusCode::RESULT_NOT_READY, -9}, + {InferenceEngine::StatusCode::NOT_ALLOCATED, -10}, + {InferenceEngine::StatusCode::INFER_NOT_STARTED, -11}, + {InferenceEngine::StatusCode::NETWORK_NOT_READ, -12}, +}; + +// +// layer_status +// +static const std::map layer_status_map = { + {InferenceEngine::InferenceEngineProfileInfo::LayerStatus::NOT_RUN, 0}, + {InferenceEngine::InferenceEngineProfileInfo::LayerStatus::OPTIMIZED_OUT, 1}, + {InferenceEngine::InferenceEngineProfileInfo::LayerStatus::EXECUTED, 2}, +}; diff --git a/inference-engine/ie_bridges/java/cpp/executable_network.cpp b/inference-engine/ie_bridges/java/cpp/executable_network.cpp new file mode 100644 index 0000000..620b92a --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/executable_network.cpp @@ -0,0 +1,58 @@ +#include + +#include 
"openvino_java.hpp" +#include "jni_common.hpp" + +using namespace InferenceEngine; + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_ExecutableNetwork_CreateInferRequest(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "CreateInferRequest"; + try + { + ExecutableNetwork *executable_network = (ExecutableNetwork *)addr; + + InferRequest *infer_request = new InferRequest(); + *infer_request = executable_network->CreateInferRequest(); + + return (jlong)infer_request; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_ExecutableNetwork_GetMetric(JNIEnv *env, jobject obj, jlong addr, jstring name) +{ + static const char method_name[] = "GetMetric"; + try + { + ExecutableNetwork *executable_network = (ExecutableNetwork *)addr; + + Parameter *parameter = new Parameter(); + *parameter = executable_network->GetMetric(jstringToString(env, name)); + + return (jlong)parameter; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_ExecutableNetwork_delete(JNIEnv *, jobject, jlong addr) +{ + ExecutableNetwork *executable_network = (ExecutableNetwork *)addr; + delete executable_network; +} diff --git a/inference-engine/ie_bridges/java/cpp/ie_core.cpp b/inference-engine/ie_bridges/java/cpp/ie_core.cpp new file mode 100644 index 0000000..896e82b --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/ie_core.cpp @@ -0,0 +1,321 @@ +#include // JNI header provided by JDK +#include + +#include "openvino_java.hpp" +#include "jni_common.hpp" + +using namespace InferenceEngine; + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetCore(JNIEnv *env, jobject obj) +{ + static const char method_name[] = "GetCore"; + try + { + Core *core = new Core(); + return (jlong)core; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetCore_1(JNIEnv *env, jobject obj, jstring xmlConfigFile) +{ + static const char method_name[] = "GetCore_1"; + try + { + std::string n_xml = jstringToString(env, xmlConfigFile); + Core *core = new Core(n_xml); + return (jlong)core; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_ReadNetwork1(JNIEnv *env, jobject obj, jlong coreAddr, jstring xml, jstring bin) +{ + static const char method_name[] = "ReadNetwork1"; + try + { + std::string n_xml = jstringToString(env, xml); + std::string n_bin = jstringToString(env, bin); + + Core *core = (Core *)coreAddr; + + CNNNetwork *network = new CNNNetwork(); + *network = core->ReadNetwork(n_xml, n_bin); + + return (jlong)network; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_ReadNetwork(JNIEnv *env, jobject obj, jlong coreAddr, jstring xml) +{ + static const char method_name[] = "ReadNetwork"; + try + { + std::string n_xml = jstringToString(env, xml); + + Core *core = (Core *)coreAddr; + + CNNNetwork *network = new CNNNetwork(); + *network = core->ReadNetwork(n_xml); + + return (jlong)network; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_LoadNetwork(JNIEnv *env, jobject obj, jlong coreAddr, jlong netAddr, jstring device) +{ + static const char method_name[] = "LoadNetwork"; + try + { + std::string n_device = jstringToString(env, device); + + Core *core = (Core *)coreAddr; + + CNNNetwork *network = (CNNNetwork *)netAddr; + + ExecutableNetwork *executable_network = new ExecutableNetwork(); + *executable_network = core->LoadNetwork(*network, n_device); + + return (jlong)executable_network; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_LoadNetwork1(JNIEnv *env, jobject obj, jlong coreAddr, jlong netAddr, jstring device, jobject config) +{ + static const char method_name[] = "LoadNetwork1"; + try + { + std::string n_device = jstringToString(env, device); + + Core *core = (Core *)coreAddr; + CNNNetwork *network = (CNNNetwork *)netAddr; + + ExecutableNetwork *executable_network = new ExecutableNetwork(); + *executable_network = core->LoadNetwork(*network, n_device, javaMapToMap(env, config)); + + return (jlong)executable_network; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_RegisterPlugin(JNIEnv *env, jobject obj, jlong addr, jstring pluginName, jstring deviceName) +{ + static const char method_name[] = "RegisterPlugin"; + try + { + const std::string n_plugin = jstringToString(env, pluginName); + const std::string n_device = jstringToString(env, deviceName); + + Core *core = (Core *) addr; + core->RegisterPlugin(n_plugin, n_device); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_UnregisterPlugin(JNIEnv *env, jobject obj, jlong addr, jstring deviceName) +{ + static const char method_name[] = "UnregisterPlugin"; + try + { + const std::string n_device = jstringToString(env, deviceName); + + Core *core = (Core *) addr; + core->UnregisterPlugin(n_device); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_RegisterPlugins(JNIEnv *env, jobject obj, jlong addr, jstring xmlConfigFile) +{ + static const char method_name[] = "RegisterPlugins"; + try + { + const std::string n_xml = jstringToString(env, xmlConfigFile); + + Core *core = (Core *) addr; + core->RegisterPlugins(n_xml); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_AddExtension(JNIEnv *env, jobject obj, jlong addr, jstring extension) +{ + static const char method_name[] = "AddExtension"; + try + { + const std::string n_extension = jstringToString(env, extension); + + const InferenceEngine::IExtensionPtr extension = + InferenceEngine::make_so_pointer(n_extension); + + Core *core = (Core *) addr; + core->AddExtension(extension); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_AddExtension1(JNIEnv *env, jobject obj, jlong addr, jstring extension, jstring deviceName) +{ + static const char method_name[] = "AddExtension"; + try + { + const std::string n_extension = jstringToString(env, extension); + const std::string n_device = jstringToString(env, deviceName); + + InferenceEngine::IExtensionPtr extension = + InferenceEngine::make_so_pointer(n_extension); + + Core *core = (Core *) addr; + core->AddExtension(extension, n_device); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_SetConfig(JNIEnv *env, jobject obj, jlong addr, jobject config, jstring deviceName) +{ + static const char method_name[] = "SetConfig"; + try + { + Core *core = (Core *) addr; + core->SetConfig(javaMapToMap(env, config), jstringToString(env, deviceName)); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_SetConfig1(JNIEnv *env, jobject obj, jlong addr, jobject config) +{ + static const char method_name[] = "SetConfig"; + try + { + Core *core = (Core *) addr; + core->SetConfig(javaMapToMap(env, config)); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } +} +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetConfig(JNIEnv *env, jobject obj, jlong addr, jstring deviceName, jstring name) +{ + static const char method_name[] = "GetConfig"; + try + { + Core *core = (Core *) addr; + Parameter *parameter = new Parameter(); + *parameter = core->GetConfig(jstringToString(env, deviceName), jstringToString(env, name)); + + return (jlong) parameter; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_delete(JNIEnv *, jobject, jlong addr) +{ + Core *core = (Core *)addr; + delete core; +} diff --git a/inference-engine/ie_bridges/java/cpp/infer_request.cpp b/inference-engine/ie_bridges/java/cpp/infer_request.cpp new file mode 100644 index 0000000..e675f2b --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/infer_request.cpp @@ -0,0 +1,233 @@ +#include +#include + +#include "openvino_java.hpp" +#include "enum_mapping.hpp" +#include "jni_common.hpp" + +using namespace InferenceEngine; + +JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_Infer(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "Infer"; + try + { + InferRequest *infer_request = (InferRequest *)addr; + infer_request->Infer(); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_StartAsync(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "StartAsync"; + try + { + InferRequest *infer_request = (InferRequest *)addr; + infer_request->StartAsync(); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT jint JNICALL Java_org_intel_openvino_InferRequest_Wait(JNIEnv *env, jobject obj, jlong addr, jint wait_mode) +{ + static const char method_name[] = "Wait"; + try + { + auto it = wait_mode_map.find(wait_mode); + InferRequest *infer_request = (InferRequest *)addr; + + if (it == wait_mode_map.end()) + throw std::runtime_error("No such WaitMode value!"); + + InferenceEngine::StatusCode status_code = infer_request->Wait(it->second); + + auto code = status_code_map.find(status_code); + if (code == status_code_map.end()) + throw std::runtime_error("No such StatusCode value!"); + + return (jint)(code->second); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +static std::mutex map_mutex; +static std::map runnable_glob_map; + +JNIEXPORT jint JNICALL Java_org_intel_openvino_InferRequest_SetCompletionCallback(JNIEnv *env, jobject, jlong addr, jobject runnable){ + static const char method_name[] = "SetCompletionCallback"; + try + { + const std::lock_guard lock(map_mutex); + + InferRequest *infer_request = (InferRequest *)addr; + jobject runnable_glob = env->NewGlobalRef(runnable); + + runnable_glob_map.insert( std::make_pair(addr, runnable_glob) ); + + JavaVM* jvm; + env->GetJavaVM(&jvm); + int version = env->GetVersion(); + + infer_request->SetCompletionCallback( + [jvm, version, runnable_glob] { + JNIEnv* myNewEnv; + JavaVMAttachArgs args; + args.version = version; + args.name = NULL; + args.group = NULL; + jvm->AttachCurrentThread((void**)&myNewEnv, &args); + + jclass runnable_class = myNewEnv->GetObjectClass(runnable_glob); + jmethodID run_method_id = myNewEnv->GetMethodID(runnable_class, "run","()V"); + myNewEnv->CallNonvirtualVoidMethod(runnable_glob, runnable_class, run_method_id); + + jvm->DetachCurrentThread(); + }); + } + catch (const std::exception &e) + { + 
throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT long JNICALL Java_org_intel_openvino_InferRequest_GetBlob(JNIEnv *env, jobject obj, jlong addr, jstring output_name) +{ + static const char method_name[] = "GetBlob"; + try + { + InferRequest *infer_request = (InferRequest *)addr; + + std::string n_output_name = jstringToString(env, output_name); + + Blob::Ptr *output = new Blob::Ptr(); + *output = infer_request->GetBlob(n_output_name); + + return (jlong)(output); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_SetBlob(JNIEnv *env, jobject obj, jlong addr, jstring input_name, jlong blobAddr) +{ + static const char method_name[] = "SetBlob"; + try + { + InferRequest *infer_request = (InferRequest *)addr; + + Blob::Ptr *blob = reinterpret_cast<Blob::Ptr *>(blobAddr); + + std::string n_input_name = jstringToString(env, input_name); + + infer_request->SetBlob(n_input_name, (*blob)); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT jobject JNICALL Java_org_intel_openvino_InferRequest_GetPerformanceCounts(JNIEnv *env, jobject, jlong addr) +{ + static const char method_name[] = "GetPerformanceCounts"; + try + { + InferRequest *infer_request = (InferRequest *)addr; + std::map<std::string, InferenceEngineProfileInfo> performance; + performance = infer_request->GetPerformanceCounts(); + + jclass hashMap_class = env->FindClass("java/util/LinkedHashMap"); + jmethodID init_method_id = env->GetMethodID(hashMap_class, "<init>", "()V"); + jobject hashMap_object = env->NewObject(hashMap_class, init_method_id); + jmethodID put_method_id = env->GetMethodID(hashMap_class, "put", + "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;"); + + jclass IEProfileInfo_class = env->FindClass("org/intel/openvino/InferenceEngineProfileInfo"); + jmethodID IEProfileInfo_init_id = env->GetMethodID(IEProfileInfo_class,"<init>", + "(Lorg/intel/openvino/InferenceEngineProfileInfo$LayerStatus;JJLjava/lang/String;Ljava/lang/String;I)V"); + + jclass layerStatus_enum = env->FindClass("org/intel/openvino/InferenceEngineProfileInfo$LayerStatus"); + jmethodID valueOf_method_id = env->GetStaticMethodID(layerStatus_enum,"valueOf","(I)Lorg/intel/openvino/InferenceEngineProfileInfo$LayerStatus;"); + + for (const auto& itr : performance) { + InferenceEngine::InferenceEngineProfileInfo ie_prof_info = itr.second; + auto it = layer_status_map.find(ie_prof_info.status); + + if (it == layer_status_map.end()) + throw std::runtime_error("No such LayerStatus value!"); + + jobject layer_status = env->CallStaticObjectMethod(layerStatus_enum, valueOf_method_id, it->second); + + std::string layer_type(ie_prof_info.layer_type); + std::string exec_type(ie_prof_info.exec_type); + + jobject IEProfileInfo = env->NewObject(IEProfileInfo_class, IEProfileInfo_init_id, layer_status, + ie_prof_info.realTime_uSec, ie_prof_info.cpu_uSec, env->NewStringUTF(exec_type.c_str()), + env->NewStringUTF(layer_type.c_str()), 
static_cast(ie_prof_info.execution_index)); + + env->CallObjectMethod(hashMap_object, put_method_id, env->NewStringUTF(itr.first.c_str()), IEProfileInfo); + } + + env->PopLocalFrame(hashMap_object); + return hashMap_object; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_delete(JNIEnv *env, jobject, jlong addr) +{ + auto it = runnable_glob_map.find(addr); + if (it != runnable_glob_map.end()) { + env->DeleteGlobalRef(it->second); + } + + InferRequest *infer_request = (InferRequest *)addr; + delete infer_request; +} diff --git a/inference-engine/ie_bridges/java/cpp/input_info.cpp b/inference-engine/ie_bridges/java/cpp/input_info.cpp new file mode 100644 index 0000000..8f4388a --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/input_info.cpp @@ -0,0 +1,134 @@ +#include // JNI header provided by JDK +#include // C Standard IO Header +#include +#include + +#include "openvino_java.hpp" +#include "enum_mapping.hpp" +#include "jni_common.hpp" + +using namespace InferenceEngine; + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_InputInfo_getPreProcess(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "getPreProcess"; + try + { + InputInfo *input_info = (InputInfo*)addr; + return (jlong)(&input_info->getPreProcess()); + + } catch (const std::exception &e){ + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_InputInfo_SetLayout(JNIEnv *env, jobject obj, jlong addr, jint layout) +{ + static const char method_name[] = "SetLayout"; + try + { + InputInfo *input_info = (InputInfo*)addr; + auto it = layout_map.find(layout); + + if (it == layout_map.end()) + throw std::runtime_error("No such layout value!"); + + input_info->setLayout(it->second); + + } catch (const std::exception &e){ + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT jint JNICALL Java_org_intel_openvino_InputInfo_getLayout(JNIEnv *env, jobject, jlong addr) +{ + static const char method_name[] = "getLayout"; + try + { + InputInfo *input_info = (InputInfo*)addr; + Layout layout = input_info->getLayout(); + + return find_by_value(layout_map, layout); + + } catch (const std::exception &e){ + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_InputInfo_SetPrecision(JNIEnv *env, jobject obj, jlong addr, jint precision) +{ + static const char method_name[] = "SetPrecision"; + try + { + InputInfo *input_info = (InputInfo*)addr; + auto it = precision_map.find(precision); + + if (it == precision_map.end()) + throw std::runtime_error("No such precision value!"); + + input_info->setPrecision(it->second); + + } catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT jint JNICALL Java_org_intel_openvino_InputInfo_getPrecision(JNIEnv *env, jobject, jlong addr) +{ + static const char method_name[] = "getPrecision"; + try + { + InputInfo *input_info = (InputInfo*)addr; + Precision precision = input_info->getPrecision(); + + return find_by_value(precision_map, precision); + + } catch (const std::exception &e){ + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_InputInfo_GetTensorDesc(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "GetTensorDesc"; + try + { + InputInfo *input_info = (InputInfo*)addr; + TensorDesc *tDesc = new TensorDesc(input_info->getTensorDesc()); + + return (jlong)tDesc; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} diff --git a/inference-engine/ie_bridges/java/cpp/jni_common.hpp b/inference-engine/ie_bridges/java/cpp/jni_common.hpp new file mode 100644 index 0000000..231d271 --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/jni_common.hpp @@ -0,0 +1,183 @@ +#include // JNI header provided by JDK +#include // C Standard IO Header + +static void throwJavaException(JNIEnv *env, const std::exception *e, const char *method) +{ + std::string what = "unknown exception"; + jclass je = 0; + + if(e){ + std::string exception_type = "InferenceEngineException"; + what = '\n' + exception_type + ": " + '\n' + '\t' + std::string(method) + ": " + e->what(); + } + + if (!je) + je = env->FindClass("java/lang/Exception"); + + env->ThrowNew(je, what.c_str()); + + (void)method; +} + +static std::string jstringToString(JNIEnv *env, jstring jstr) +{ + static const char method_name[] = "jstringToString"; + try + { + const char *utf_str = env->GetStringUTFChars(jstr, 0); + std::string 
n_str(utf_str ? utf_str : ""); + env->ReleaseStringUTFChars(jstr, utf_str); + return n_str; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return nullptr; +} + +static std::map<std::string, std::string> javaMapToMap(JNIEnv *env, jobject java_map) +{ + static const char method_name[] = "javaMapToMap"; + try + { + jclass map_class = env->GetObjectClass(java_map); + jmethodID entrySet_method_id = env->GetMethodID(map_class, "entrySet", "()Ljava/util/Set;"); + jobject entry_set = env->CallObjectMethod(java_map, entrySet_method_id); + + jclass set_class = env->FindClass("java/util/Set"); + jmethodID iterator_method_id = env->GetMethodID(set_class, "iterator", "()Ljava/util/Iterator;"); + jobject iterator = env->CallObjectMethod(entry_set, iterator_method_id); + + jclass iterator_class = env->FindClass("java/util/Iterator"); + jmethodID hasNext_method_id = env->GetMethodID(iterator_class, "hasNext", "()Z"); + jmethodID next_method_id = env->GetMethodID(iterator_class, "next", "()Ljava/lang/Object;"); + bool hasNext = (bool) (env->CallBooleanMethod(iterator, hasNext_method_id) == JNI_TRUE); + + jclass mapentry_class = (env)->FindClass("java/util/Map$Entry"); + jmethodID get_key_method_id = env->GetMethodID(mapentry_class, "getKey", "()Ljava/lang/Object;"); + jmethodID get_value_method_id = env->GetMethodID(mapentry_class, "getValue", "()Ljava/lang/Object;"); + + jclass string_class = env->FindClass("java/lang/String"); + jmethodID to_string_method_id = env->GetMethodID(string_class, "toString", "()Ljava/lang/String;"); + + std::map<std::string, std::string> c_map; + + while(hasNext) { + jobject entry = env->CallObjectMethod(iterator, next_method_id); + + jstring key = (jstring) env->CallObjectMethod(env->CallObjectMethod(entry, get_key_method_id), to_string_method_id); + jstring value = (jstring) env->CallObjectMethod(env->CallObjectMethod(entry, get_value_method_id), to_string_method_id); + + 
c_map.insert(std::make_pair(jstringToString(env, key), jstringToString(env, value))); + + hasNext = (bool) (env->CallBooleanMethod(iterator, hasNext_method_id) == JNI_TRUE); + } + + return c_map; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return std::map<std::string, std::string>(); +} + +static std::map<std::string, std::vector<size_t>> javaMapToMap_1(JNIEnv *env, jobject java_map) +{ + static const char method_name[] = "javaMapToMap"; + try + { + jclass map_class = env->GetObjectClass(java_map); + jmethodID entrySet_method_id = env->GetMethodID(map_class, "entrySet", "()Ljava/util/Set;"); + jobject entry_set = env->CallObjectMethod(java_map, entrySet_method_id); + + jclass set_class = env->FindClass("java/util/Set"); + jmethodID iterator_method_id = env->GetMethodID(set_class, "iterator", "()Ljava/util/Iterator;"); + jobject iterator = env->CallObjectMethod(entry_set, iterator_method_id); + + jclass iterator_class = env->FindClass("java/util/Iterator"); + jmethodID hasNext_method_id = env->GetMethodID(iterator_class, "hasNext", "()Z"); + jmethodID next_method_id = env->GetMethodID(iterator_class, "next", "()Ljava/lang/Object;"); + bool hasNext = (bool) (env->CallBooleanMethod(iterator, hasNext_method_id) == JNI_TRUE); + + jclass mapentry_class = (env)->FindClass("java/util/Map$Entry"); + jmethodID get_key_method_id = env->GetMethodID(mapentry_class, "getKey", "()Ljava/lang/Object;"); + jmethodID get_value_method_id = env->GetMethodID(mapentry_class, "getValue", "()Ljava/lang/Object;"); + + jclass string_class = env->FindClass("java/lang/String"); + jmethodID to_string_method_id = env->GetMethodID(string_class, "toString", "()Ljava/lang/String;"); + + std::map<std::string, std::vector<size_t>> c_map; + + while(hasNext) { + jobject entry = env->CallObjectMethod(iterator, next_method_id); + + jstring key = (jstring)env->CallObjectMethod(env->CallObjectMethod(entry, get_key_method_id), to_string_method_id); + jintArray value = 
(jintArray)env->CallObjectMethod(entry, get_value_method_id); + + const jsize length = env->GetArrayLength(value); + jint *data = env->GetIntArrayElements(value, 0); + + c_map.insert(std::make_pair(jstringToString(env, key), std::vector<size_t>(data, data + length))); + + env->ReleaseIntArrayElements(value, data, 0); + + hasNext = (bool)(env->CallBooleanMethod(iterator, hasNext_method_id) == JNI_TRUE); + } + + return c_map; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return std::map<std::string, std::vector<size_t>>(); +} + +template <typename Key, typename Val> +static Key find_by_value(const std::map<Key, Val>& map, Val& value) +{ + for (auto &itr : map) + if (itr.second == value) + return itr.first; + + throw std::runtime_error("No such value in java bindings enum!"); +} + +static std::vector<size_t> jintArrayToVector(JNIEnv *env, jintArray dims) +{ + static const char method_name[] = "jintArrayToVector"; + try + { + const jsize length = env->GetArrayLength(dims); + jint *data = env->GetIntArrayElements(dims, 0); + + std::vector<size_t> res(data, data + length); + env->ReleaseIntArrayElements(dims, data, 0); + + return res; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + + return std::vector(); +} diff --git a/inference-engine/ie_bridges/java/cpp/locked_memory.cpp b/inference-engine/ie_bridges/java/cpp/locked_memory.cpp new file mode 100644 index 0000000..20463ae --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/locked_memory.cpp @@ -0,0 +1,53 @@ +#include +#include "openvino_java.hpp" +#include "jni_common.hpp" + +using namespace InferenceEngine; + +JNIEXPORT void JNICALL Java_org_intel_openvino_LockedMemory_asFloat(JNIEnv *env, jobject obj, jlong addr, jfloatArray res) +{ + static const char method_name[] = "asFloat"; + try + { + LockedMemory *lmem = (LockedMemory *) addr; + const float *buffer = lmem->as(); + + const jsize size = env->GetArrayLength(res); + env->SetFloatArrayRegion(res, 0, size, buffer); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_LockedMemory_asByte(JNIEnv *env, jobject obj, jlong addr, jbyteArray res) +{ + static const char method_name[] = "asByte"; + try + { + LockedMemory *lmem = (LockedMemory *) addr; + const uint8_t *buffer = lmem->as(); + + const jsize size = env->GetArrayLength(res); + env->SetByteArrayRegion(res, 0, size, reinterpret_cast(buffer)); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_LockedMemory_delete(JNIEnv *, jobject, jlong addr) +{ + LockedMemory *lmem = (LockedMemory *) addr; + delete lmem; +} diff --git a/inference-engine/ie_bridges/java/cpp/openvino_java.hpp b/inference-engine/ie_bridges/java/cpp/openvino_java.hpp new file mode 100644 index 0000000..781b4b7 --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/openvino_java.hpp @@ -0,0 +1,111 @@ +#include + +#ifdef __cplusplus +extern "C" +{ +#endif + +// +// IECore +// +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_ReadNetwork1(JNIEnv *, jobject, jlong, jstring, jstring); +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_ReadNetwork(JNIEnv *, jobject, jlong, jstring); +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_LoadNetwork(JNIEnv *, jobject, jlong, jlong, jstring); +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_LoadNetwork1(JNIEnv *, jobject, jlong, jlong, jstring, jobject); +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_RegisterPlugin(JNIEnv *, jobject, jlong, jstring, jstring); +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_UnregisterPlugin(JNIEnv *, jobject, jlong, jstring); +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_AddExtension(JNIEnv *, jobject, jlong, jstring); +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_AddExtension1(JNIEnv *, jobject, jlong, jstring, jstring); +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_RegisterPlugins(JNIEnv *, jobject, jlong, jstring); +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_SetConfig(JNIEnv *, jobject, jlong, jobject, jstring); +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_SetConfig1(JNIEnv *, jobject, jlong, jobject); +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetConfig(JNIEnv *, jobject, jlong, jstring, jstring); +JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetCore(JNIEnv *, jobject); +JNIEXPORT jlong JNICALL 
Java_org_intel_openvino_IECore_GetCore_1(JNIEnv *, jobject, jstring); +JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_delete(JNIEnv *, jobject, jlong); + +// +// CNNNetwork +// +JNIEXPORT jstring JNICALL Java_org_intel_openvino_CNNNetwork_getName(JNIEnv *, jobject, jlong); +JNIEXPORT jint JNICALL Java_org_intel_openvino_CNNNetwork_getBatchSize(JNIEnv *, jobject, jlong); +JNIEXPORT jobject JNICALL Java_org_intel_openvino_CNNNetwork_GetInputsInfo(JNIEnv *, jobject, jlong); +JNIEXPORT jobject JNICALL Java_org_intel_openvino_CNNNetwork_GetOutputsInfo(JNIEnv *, jobject, jlong); +JNIEXPORT void JNICALL Java_org_intel_openvino_CNNNetwork_reshape(JNIEnv *, jobject, jlong, jobject); +JNIEXPORT jobject JNICALL Java_org_intel_openvino_CNNNetwork_getInputShapes(JNIEnv *, jobject, jlong); +JNIEXPORT void JNICALL Java_org_intel_openvino_CNNNetwork_addOutput(JNIEnv *, jobject, jlong, jstring, jint); +JNIEXPORT void JNICALL Java_org_intel_openvino_CNNNetwork_addOutput1(JNIEnv *, jobject, jlong, jstring); +JNIEXPORT void JNICALL Java_org_intel_openvino_CNNNetwork_delete(JNIEnv *, jobject, jlong); + +// +// InferRequest +// +JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_Infer(JNIEnv *, jobject, jlong); +JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_StartAsync(JNIEnv *, jobject, jlong); +JNIEXPORT jint JNICALL Java_org_intel_openvino_InferRequest_Wait(JNIEnv *, jobject, jlong, jint); +JNIEXPORT jint JNICALL Java_org_intel_openvino_InferRequest_SetCompletionCallback(JNIEnv *, jobject, jlong, jobject); +JNIEXPORT long JNICALL Java_org_intel_openvino_InferRequest_GetBlob(JNIEnv *, jobject, jlong, jstring); +JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_SetBlob(JNIEnv *, jobject, jlong, jstring, jlong); +JNIEXPORT jobject JNICALL Java_org_intel_openvino_InferRequest_GetPerformanceCounts(JNIEnv *, jobject, jlong); +JNIEXPORT void JNICALL Java_org_intel_openvino_InferRequest_delete(JNIEnv *, jobject, jlong); + +// +// ExecutableNetwork +// 
+JNIEXPORT jlong JNICALL Java_org_intel_openvino_ExecutableNetwork_CreateInferRequest(JNIEnv *, jobject, jlong); +JNIEXPORT jlong JNICALL Java_org_intel_openvino_ExecutableNetwork_GetMetric(JNIEnv *, jobject, jlong, jstring); +JNIEXPORT void JNICALL Java_org_intel_openvino_ExecutableNetwork_delete(JNIEnv *, jobject, jlong); + +// +// Blob +// +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_GetTensorDesc(JNIEnv *, jobject, jlong); +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_GetBlob(JNIEnv *, jobject, jlong); +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_BlobByte(JNIEnv *, jobject, jlong, jbyteArray); +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_BlobFloat(JNIEnv *, jobject, jlong, jfloatArray); +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_BlobCArray(JNIEnv *, jobject, jlong, jlong); +JNIEXPORT jint JNICALL Java_org_intel_openvino_Blob_size(JNIEnv *, jobject ,jlong); +JNIEXPORT jlong JNICALL Java_org_intel_openvino_Blob_rmap(JNIEnv *, jobject, jlong); +JNIEXPORT void JNICALL Java_org_intel_openvino_Blob_delete(JNIEnv *, jobject, jlong); + +// +// LockedMemory +// +JNIEXPORT void JNICALL Java_org_intel_openvino_LockedMemory_asByte(JNIEnv *, jobject, jlong, jbyteArray); +JNIEXPORT void JNICALL Java_org_intel_openvino_LockedMemory_asFloat(JNIEnv *, jobject, jlong, jfloatArray); +JNIEXPORT void JNICALL Java_org_intel_openvino_LockedMemory_delete(JNIEnv *, jobject, jlong); + +// +// InputInfo +// +JNIEXPORT jlong JNICALL Java_org_intel_openvino_InputInfo_getPreProcess(JNIEnv *, jobject, jlong); +JNIEXPORT void JNICALL Java_org_intel_openvino_InputInfo_SetLayout(JNIEnv *, jobject, jlong, jint); +JNIEXPORT jint JNICALL Java_org_intel_openvino_InputInfo_getLayout(JNIEnv *, jobject, jlong); +JNIEXPORT void JNICALL Java_org_intel_openvino_InputInfo_SetPrecision(JNIEnv *, jobject, jlong, jint); +JNIEXPORT jint JNICALL Java_org_intel_openvino_InputInfo_getPrecision(JNIEnv *, jobject, jlong); +JNIEXPORT jlong JNICALL 
Java_org_intel_openvino_InputInfo_GetTensorDesc(JNIEnv *, jobject, jlong); + +// +// PreProcessInfo +// +JNIEXPORT void JNICALL Java_org_intel_openvino_PreProcessInfo_SetResizeAlgorithm(JNIEnv *, jobject, jlong, jint); + +// +// TensorDesc +// +JNIEXPORT jlong JNICALL Java_org_intel_openvino_TensorDesc_GetTensorDesc(JNIEnv *, jobject, jint, jintArray, jint); +JNIEXPORT jintArray JNICALL Java_org_intel_openvino_TensorDesc_GetDims(JNIEnv *, jobject, jlong); +JNIEXPORT jint JNICALL Java_org_intel_openvino_TensorDesc_getLayout(JNIEnv *, jobject, jlong); +JNIEXPORT jint JNICALL Java_org_intel_openvino_TensorDesc_getPrecision(JNIEnv *, jobject, jlong); +JNIEXPORT void JNICALL Java_org_intel_openvino_TensorDesc_delete(JNIEnv *, jobject, jlong); + +// +// Parameter +// +JNIEXPORT jstring JNICALL Java_org_intel_openvino_Parameter_asString(JNIEnv *, jobject, jlong); +JNIEXPORT jint JNICALL Java_org_intel_openvino_Parameter_asInt(JNIEnv *, jobject, jlong); +JNIEXPORT void JNICALL Java_org_intel_openvino_Parameter_delete(JNIEnv *, jobject, jlong); + +#ifdef __cplusplus +} +#endif diff --git a/inference-engine/ie_bridges/java/cpp/parameter.cpp b/inference-engine/ie_bridges/java/cpp/parameter.cpp new file mode 100644 index 0000000..2420388 --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/parameter.cpp @@ -0,0 +1,49 @@ +#include +#include "openvino_java.hpp" +#include "jni_common.hpp" + +using namespace InferenceEngine; + +JNIEXPORT jstring JNICALL Java_org_intel_openvino_Parameter_asString(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "asString"; + try + { + Parameter *parameter = (Parameter *)addr; + return env->NewStringUTF(parameter->as().c_str()); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jint JNICALL Java_org_intel_openvino_Parameter_asInt(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "asInt"; + try + { + Parameter *parameter = (Parameter *)addr; + return static_cast(parameter->as()); + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_Parameter_delete(JNIEnv *, jobject, jlong addr) +{ + Parameter *parameter = (Parameter *)addr; + delete parameter; +} diff --git a/inference-engine/ie_bridges/java/cpp/pre_process_info.cpp b/inference-engine/ie_bridges/java/cpp/pre_process_info.cpp new file mode 100644 index 0000000..f002c34 --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/pre_process_info.cpp @@ -0,0 +1,30 @@ +#include + +#include "openvino_java.hpp" +#include "enum_mapping.hpp" +#include "jni_common.hpp" + +using namespace InferenceEngine; + +JNIEXPORT void JNICALL Java_org_intel_openvino_PreProcessInfo_SetResizeAlgorithm(JNIEnv *env, jobject obj, jlong addr, jint resizeAlgorithm) +{ + static const char method_name[] = "SetResizeAlgorithm"; + + try + { + PreProcessInfo *pre_process_info = (PreProcessInfo*)addr; + auto it = resize_alg_map.find(resizeAlgorithm); + + if (it == resize_alg_map.end()) + throw std::runtime_error("Wrong resize algorithm number!"); + + pre_process_info->setResizeAlgorithm(it->second); + + } catch (const std::exception &e){ + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } +} diff --git a/inference-engine/ie_bridges/java/cpp/tensor_desc.cpp b/inference-engine/ie_bridges/java/cpp/tensor_desc.cpp new file mode 100644 index 0000000..7a15e0b --- /dev/null +++ b/inference-engine/ie_bridges/java/cpp/tensor_desc.cpp @@ -0,0 +1,111 @@ +#include + +#include "openvino_java.hpp" +#include "enum_mapping.hpp" +#include "jni_common.hpp" + +using namespace InferenceEngine; + +JNIEXPORT jlong JNICALL Java_org_intel_openvino_TensorDesc_GetTensorDesc(JNIEnv *env, jobject obj, jint precision, jintArray dims, jint layout) +{ + static const char method_name[] = "GetTensorDesc"; + try + { + auto l = precision_map.find(precision); + if (l == precision_map.end()) + throw std::runtime_error("No such precision value!"); + + auto pr = layout_map.find(layout); + if (pr == layout_map.end()) + throw std::runtime_error("No such layout value!"); + + auto n_precision = precision_map.at(precision); + auto n_layout = layout_map.at(layout); + + TensorDesc *tDesc = new TensorDesc(n_precision, jintArrayToVector(env, dims), n_layout); + + return (jlong)tDesc; + } catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jintArray JNICALL Java_org_intel_openvino_TensorDesc_GetDims(JNIEnv *env, jobject obj, jlong addr) +{ + static const char method_name[] = "GetDims"; + try + { + TensorDesc *tDesc = (TensorDesc *)addr; + std::vector size_t_dims = tDesc->getDims(); + + jintArray result = env->NewIntArray(size_t_dims.size()); + jint *arr = env->GetIntArrayElements(result, nullptr); + + for (int i = 0; i < size_t_dims.size(); ++i) + arr[i] = size_t_dims[i]; + + env->ReleaseIntArrayElements(result, arr, 0); + return result; + } + catch (const std::exception &e) + { + throwJavaException(env, &e, method_name); + } + catch (...) 
+ { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jint JNICALL Java_org_intel_openvino_TensorDesc_getLayout(JNIEnv *env, jobject, jlong addr) +{ + static const char method_name[] = "getLayout"; + try + { + TensorDesc *tDesk = (TensorDesc *)addr; + Layout layout = tDesk->getLayout(); + + return find_by_value(layout_map, layout); + + } catch (const std::exception &e){ + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT jint JNICALL Java_org_intel_openvino_TensorDesc_getPrecision(JNIEnv *env, jobject, jlong addr) +{ + static const char method_name[] = "getPrecision"; + try + { + TensorDesc *tDesk = (TensorDesc *)addr; + Precision precision = tDesk->getPrecision(); + + return find_by_value(precision_map, precision); + + } catch (const std::exception &e){ + throwJavaException(env, &e, method_name); + } + catch (...) + { + throwJavaException(env, 0, method_name); + } + return 0; +} + +JNIEXPORT void JNICALL Java_org_intel_openvino_TensorDesc_delete(JNIEnv *, jobject, jlong addr) +{ + TensorDesc *tDesk = (TensorDesc *)addr; + delete tDesk; +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/Blob.java b/inference-engine/ie_bridges/java/org/intel/openvino/Blob.java new file mode 100644 index 0000000..054cfc7 --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/Blob.java @@ -0,0 +1,58 @@ +package org.intel.openvino; + +public class Blob extends IEWrapper { + + protected Blob(long addr) { + super(addr); + } + + public Blob(TensorDesc tensorDesc) { + super(GetBlob(tensorDesc.getNativeObjAddr())); + } + + public Blob(TensorDesc tensorDesc, byte[] data) { + super(BlobByte(tensorDesc.getNativeObjAddr(), data)) ; + } + + public Blob(TensorDesc tensorDesc, float[] data) { + super(BlobFloat(tensorDesc.getNativeObjAddr(), data)); + } + + public Blob(TensorDesc tensorDesc, long cArray) { + super(BlobCArray(tensorDesc.nativeObj, 
cArray)); + } + + public TensorDesc getTensorDesc(){ + return new TensorDesc(GetTensorDesc(nativeObj)); + } + + public int size() { + return size(nativeObj); + } + + public LockedMemory rmap() { + return new LockedMemory(rmap(nativeObj)); + } + + /*----------------------------------- native methods -----------------------------------*/ + private native long GetTensorDesc(long addr); + + private static native long GetBlob(long tensorDesc); + + private static native long BlobByte(long tensorDesc, byte[] data); + + private static native long BlobFloat(long tensorDesc, float[] data); + + private static native long BlobCArray(long tensorDesc, long cArray); + + private static native byte[] asByte(long addr); + + private static native float[] asFloat(long addr); + + private static native int size(long addr); + + private static native long rmap(long addr); + + @Override + protected native void delete(long nativeObj); +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/CNNNetwork.java b/inference-engine/ie_bridges/java/org/intel/openvino/CNNNetwork.java new file mode 100644 index 0000000..85c6c09 --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/CNNNetwork.java @@ -0,0 +1,62 @@ +package org.intel.openvino; + +import java.util.Map; + +public class CNNNetwork extends IEWrapper { + + protected CNNNetwork(long addr) { + super(addr); + } + + public String getName(){ + return getName(nativeObj); + } + + public int getBatchSize(){ + return getBatchSize(nativeObj); + } + + public Map getOutputsInfo() { + return GetOutputsInfo(nativeObj); + } + + public Map getInputsInfo(){ + return GetInputsInfo(nativeObj); + } + + public void reshape(Map inputShapes) { + reshape(nativeObj, inputShapes); + } + + public Map getInputShapes(){ + return getInputShapes(nativeObj); + } + + public void addOutput(String layerName, int outputIndex) { + addOutput(nativeObj, layerName, outputIndex); + } + + public void addOutput(String layerName) { + addOutput1(nativeObj, 
layerName); + } + + /*----------------------------------- native methods -----------------------------------*/ + private static native String getName(long addr); + + private static native int getBatchSize(long addr); + + private static native Map GetInputsInfo(long addr); + + private static native Map GetOutputsInfo(long addr); + + private static native void reshape(long addr, Map inputShapes); + + private static native Map getInputShapes(long addr); + + private static native void addOutput(long addr, String layerName, int outputIndex); + + private static native void addOutput1(long addr, String layerName); + + @Override + protected native void delete(long nativeObj); +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/Data.java b/inference-engine/ie_bridges/java/org/intel/openvino/Data.java new file mode 100644 index 0000000..384ccce --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/Data.java @@ -0,0 +1,8 @@ +package org.intel.openvino; + +public class Data extends IEWrapper{ + + protected Data(long addr) { + super(addr); + } +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/ExecutableNetwork.java b/inference-engine/ie_bridges/java/org/intel/openvino/ExecutableNetwork.java new file mode 100644 index 0000000..c315b30 --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/ExecutableNetwork.java @@ -0,0 +1,24 @@ +package org.intel.openvino; + +public class ExecutableNetwork extends IEWrapper { + + protected ExecutableNetwork(long addr) { + super(addr); + } + + public InferRequest CreateInferRequest() { + return new InferRequest(CreateInferRequest(nativeObj)); + } + + public Parameter GetMetric(String name) { + return new Parameter(GetMetric(nativeObj, name)); + } + + /*----------------------------------- native methods -----------------------------------*/ + private static native long CreateInferRequest(long addr); + + private static native long GetMetric(long addr, String name); + + @Override + 
protected native void delete(long nativeObj); +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/IECore.java b/inference-engine/ie_bridges/java/org/intel/openvino/IECore.java new file mode 100644 index 0000000..a6c07d0 --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/IECore.java @@ -0,0 +1,95 @@ +package org.intel.openvino; + +import java.util.Map; + +public class IECore extends IEWrapper { + public static final String NATIVE_LIBRARY_NAME = "inference_engine_java_api"; + + public IECore() { + super(GetCore()); + } + + public IECore(String xmlConfigFile) { + super(GetCore_1(xmlConfigFile)); + } + + public CNNNetwork ReadNetwork(final String modelPath, final String weightPath) { + return new CNNNetwork(ReadNetwork1(nativeObj, modelPath, weightPath)); + } + + public CNNNetwork ReadNetwork(final String modelFileName) { + return new CNNNetwork(ReadNetwork(nativeObj, modelFileName)); + } + + public ExecutableNetwork LoadNetwork(CNNNetwork net, final String device) { + return new ExecutableNetwork(LoadNetwork(nativeObj, net.getNativeObjAddr(), device)); + } + + public ExecutableNetwork LoadNetwork(CNNNetwork net, final String device, final Map config) { + return new ExecutableNetwork(LoadNetwork1(nativeObj, net.getNativeObjAddr(), device, config)); + } + + public void RegisterPlugin(String pluginName, String deviceName) { + RegisterPlugin(nativeObj, pluginName, deviceName); + } + + public void RegisterPlugin(String xmlConfigFile) { + RegisterPlugins(nativeObj, xmlConfigFile); + } + + public void UnregisterPlugin(String deviceName) { + UnregisterPlugin(nativeObj, deviceName); + } + + public void AddExtension(String extension) { + AddExtension(nativeObj, extension); + } + + public void AddExtension(String extension, String deviceName) { + AddExtension1(nativeObj, extension, deviceName); + } + + public void SetConfig(Map config, String deviceName) { + SetConfig(nativeObj, config, deviceName); + } + + public void SetConfig(Map config) { 
+ SetConfig1(nativeObj, config); + } + + public Parameter GetConfig(String deviceName, String name) { + return new Parameter(GetConfig(nativeObj, deviceName, name)); + } + + /*----------------------------------- native methods -----------------------------------*/ + private static native long ReadNetwork(long core, final String modelFileName); + + private static native long ReadNetwork1(long core, final String modelPath, final String weightPath); + + private static native long LoadNetwork(long core, long net, final String device); + + private static native long LoadNetwork1(long core, long net, final String device, final Map config); + + private static native void RegisterPlugin(long core, String pluginName, String deviceName); + + private static native void RegisterPlugins(long core, String xmlConfigFile); + + private static native void UnregisterPlugin(long core, String deviceName); + + private static native void AddExtension(long core, String extension); + + private static native void AddExtension1(long core, String extension, String deviceName); + + private static native void SetConfig(long core, Map config, String deviceName); + + private static native void SetConfig1(long core, Map config); + + private static native long GetConfig(long core, String deviceName, String name); + + private static native long GetCore(); + + private static native long GetCore_1(String xmlConfigFile); + + @Override + protected native void delete(long nativeObj); +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/IEWrapper.java b/inference-engine/ie_bridges/java/org/intel/openvino/IEWrapper.java new file mode 100644 index 0000000..33652d1 --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/IEWrapper.java @@ -0,0 +1,21 @@ +package org.intel.openvino; + +public class IEWrapper { + protected final long nativeObj; + + protected IEWrapper(long addr){ + nativeObj = addr; + } + + protected long getNativeObjAddr() { + return nativeObj; + } + + @Override + 
protected void finalize() throws Throwable { + delete(nativeObj); + } + + /*----------------------------------- native methods -----------------------------------*/ + protected native void delete(long nativeObj); +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/InferRequest.java b/inference-engine/ie_bridges/java/org/intel/openvino/InferRequest.java new file mode 100644 index 0000000..46c99d9 --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/InferRequest.java @@ -0,0 +1,56 @@ +package org.intel.openvino; + +import java.util.Map; + +public class InferRequest extends IEWrapper { + + protected InferRequest(long addr) { + super(addr); + } + + public void Infer() { + Infer(nativeObj); + } + + public Blob GetBlob(String name) { + return new Blob(GetBlob(nativeObj, name)); + } + + public void SetBlob(String name, Blob blob) { + SetBlob(nativeObj, name, blob.getNativeObjAddr()); + } + + public void StartAsync() { + StartAsync(nativeObj); + } + + public StatusCode Wait(WaitMode waitMode) { + return StatusCode.valueOf(Wait(nativeObj, waitMode.getValue())); + } + + public void SetCompletionCallback(Runnable runnable){ + SetCompletionCallback(nativeObj, runnable); + } + + public Map GetPerformanceCounts() { + return GetPerformanceCounts(nativeObj); + } + + /*----------------------------------- native methods -----------------------------------*/ + private static native void Infer(long addr); + + private static native void StartAsync(long addr); + + private static native int Wait(long addr, int wait_mode); + + private static native void SetCompletionCallback(long addr, Runnable runnable); + + private static native long GetBlob(long addr, String name); + + private static native void SetBlob(long addr, String name, long blobAddr); + + private static native Map GetPerformanceCounts(long addr); + + @Override + protected native void delete(long nativeObj); +} diff --git 
a/inference-engine/ie_bridges/java/org/intel/openvino/InferenceEngineProfileInfo.java b/inference-engine/ie_bridges/java/org/intel/openvino/InferenceEngineProfileInfo.java new file mode 100644 index 0000000..c7bc86c --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/InferenceEngineProfileInfo.java @@ -0,0 +1,49 @@ +package org.intel.openvino; + +import java.util.HashMap; +import java.util.Map; + +public class InferenceEngineProfileInfo { + public enum LayerStatus { + NOT_RUN(0), + OPTIMIZED_OUT(1), + EXECUTED(2); + + private int value; + private static Map map = new HashMap(); + + static { + for (LayerStatus layerStatus : LayerStatus.values()) { + map.put(layerStatus.value, layerStatus); + } + } + + LayerStatus(int value) { + this.value = value; + } + + int getValue() { + return value; + } + + static LayerStatus valueOf(int value) { + return map.get(value); + } + } + + public LayerStatus status; + public long realTimeUSec; + public long cpuUSec; + public String execType; + public String layerType; + public int executionIndex; + + public InferenceEngineProfileInfo(LayerStatus status, long realTimeUSec, long cpuUSec, String execType, String layerType, int executionIndex) { + this.status = status; + this.realTimeUSec = realTimeUSec; + this.cpuUSec = cpuUSec; + this.execType = execType; + this.layerType = layerType; + this.executionIndex = executionIndex; + } +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/InputInfo.java b/inference-engine/ie_bridges/java/org/intel/openvino/InputInfo.java new file mode 100644 index 0000000..4579947 --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/InputInfo.java @@ -0,0 +1,45 @@ +package org.intel.openvino; + +public class InputInfo extends IEWrapper{ + + public InputInfo(long addr) { + super(addr); + } + + public PreProcessInfo getPreProcess() { + return new PreProcessInfo(getPreProcess(nativeObj)); + } + + public void setLayout(Layout layout) { + SetLayout(nativeObj, 
layout.getValue()); + } + + public Layout getLayout(){ + return Layout.valueOf(getLayout(nativeObj)); + } + + public void setPrecision(Precision precision) { + SetPrecision(nativeObj, precision.getValue()); + } + + public Precision getPrecision(){ + return Precision.valueOf(getPrecision(nativeObj)); + } + + public TensorDesc getTensorDesc(){ + return new TensorDesc(GetTensorDesc(nativeObj)); + } + + /*----------------------------------- native methods -----------------------------------*/ + private static native long getPreProcess(long addr); + + private static native void SetLayout(long addr, int layout); + + private static native int getLayout(long addr); + + private static native void SetPrecision(long addr, int precision); + + private static native int getPrecision(long addr); + + private native long GetTensorDesc(long addr); +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/Layout.java b/inference-engine/ie_bridges/java/org/intel/openvino/Layout.java new file mode 100644 index 0000000..3744f54 --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/Layout.java @@ -0,0 +1,47 @@ +package org.intel.openvino; + +import java.util.HashMap; +import java.util.Map; + +public enum Layout { + ANY(0), + NCHW(1), + NHWC(2), + NCDHW(3), + NDHWC(4), + + OIHW(64), + + SCALAR(95), + + C(96), + + CHW(128), + + HW(192), + NC(193), + CN(194), + + BLOCKED(200); + + private int value; + private static Map map = new HashMap(); + + static { + for (Layout layout : Layout.values()) { + map.put(layout.value, layout); + } + } + + private Layout(int value) { + this.value = value; + } + + public int getValue() { + return value; + } + + static Layout valueOf(int value) { + return map.get(value); + } +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/LockedMemory.java b/inference-engine/ie_bridges/java/org/intel/openvino/LockedMemory.java new file mode 100644 index 0000000..88ee660 --- /dev/null +++ 
b/inference-engine/ie_bridges/java/org/intel/openvino/LockedMemory.java @@ -0,0 +1,24 @@ +package org.intel.openvino; + +public class LockedMemory extends IEWrapper { + + protected LockedMemory(long addr) { + super(addr); + } + + public void get(float[] res) { + asFloat(nativeObj, res); + } + + public void get(byte[] res) { + asByte(nativeObj, res); + } + + /*----------------------------------- native methods -----------------------------------*/ + private static native void asByte(long addr, byte[] res); + + private static native void asFloat(long addr, float[] res); + + @Override + protected native void delete(long nativeObj); +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/Parameter.java b/inference-engine/ie_bridges/java/org/intel/openvino/Parameter.java new file mode 100644 index 0000000..a0e8cad --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/Parameter.java @@ -0,0 +1,23 @@ +package org.intel.openvino; + +public class Parameter extends IEWrapper { + public Parameter(long addr) { + super(addr); + } + + public int asInt() { + return asInt(nativeObj); + } + + public String asString() { + return asString(nativeObj); + } + + /*----------------------------------- native methods -----------------------------------*/ + private static native String asString(long addr); + + private static native int asInt(long addr); + + @Override + protected native void delete(long nativeObj); +} \ No newline at end of file diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/PreProcessInfo.java b/inference-engine/ie_bridges/java/org/intel/openvino/PreProcessInfo.java new file mode 100644 index 0000000..a236885 --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/PreProcessInfo.java @@ -0,0 +1,15 @@ +package org.intel.openvino; + +public class PreProcessInfo extends IEWrapper{ + + public PreProcessInfo(long addr) { + super(addr); + } + + public void setResizeAlgorithm(ResizeAlgorithm resizeAlgorithm) { + 
SetResizeAlgorithm(nativeObj, resizeAlgorithm.getValue()); + } + + /*----------------------------------- native methods -----------------------------------*/ + private static native void SetResizeAlgorithm(long addr, int resizeAlgorithm); +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/Precision.java b/inference-engine/ie_bridges/java/org/intel/openvino/Precision.java new file mode 100644 index 0000000..a6eff70 --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/Precision.java @@ -0,0 +1,42 @@ +package org.intel.openvino; + +import java.util.HashMap; +import java.util.Map; + +public enum Precision { + UNSPECIFIED(255), + MIXED(0), + FP32(10), + FP16(11), + Q78(20), + I16(30), + U8(40), + I8(50), + U16(60), + I32(70), + I64(72), + BIN(71), + CUSTOM(80); + + private int value; + private static Map map = new HashMap(); + + static { + for (Precision precision : Precision.values()) { + map.put(precision.value, precision); + } + } + + private Precision(int value) { + this.value = value; + } + + public int getValue() { + return value; + } + + static Precision valueOf(int value) { + return map.get(value); + } +} + \ No newline at end of file diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/ResizeAlgorithm.java b/inference-engine/ie_bridges/java/org/intel/openvino/ResizeAlgorithm.java new file mode 100644 index 0000000..3e037ac --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/ResizeAlgorithm.java @@ -0,0 +1,15 @@ +package org.intel.openvino; + +public enum ResizeAlgorithm { + NO_RESIZE(0), RESIZE_BILINEAR(1), RESIZE_AREA(2); + + private int value; + + private ResizeAlgorithm(int value) { + this.value = value; + } + + public int getValue() { + return value; + } +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/StatusCode.java b/inference-engine/ie_bridges/java/org/intel/openvino/StatusCode.java new file mode 100644 index 0000000..5ba8b43 --- /dev/null +++ 
b/inference-engine/ie_bridges/java/org/intel/openvino/StatusCode.java @@ -0,0 +1,33 @@ +package org.intel.openvino; + +import java.util.Map; +import java.util.HashMap; + +public enum StatusCode { + OK(0), GENERAL_ERROR(-1), NOT_IMPLEMENTED(-2), NETWORK_NOT_LOADED(-3), + PARAMETER_MISMATCH(-4), NOT_FOUND(-5), OUT_OF_BOUNDS(-6), UNEXPECTED(-7), + REQUEST_BUSY(-8), RESULT_NOT_READY(-9), NOT_ALLOCATED(-10), INFER_NOT_STARTED(-11), + NETWORK_NOT_READ(-12); + + private int value; + + private static Map map = new HashMap(); + + static { + for (StatusCode statusCode : StatusCode.values()) { + map.put(statusCode.value, statusCode); + } + } + + private StatusCode(int value) { + this.value = value; + } + + public int getValue() { + return value; + } + + public static StatusCode valueOf(int value) { + return map.get(value); + } +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/TensorDesc.java b/inference-engine/ie_bridges/java/org/intel/openvino/TensorDesc.java new file mode 100644 index 0000000..24da3a4 --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/TensorDesc.java @@ -0,0 +1,38 @@ +package org.intel.openvino; + +import java.util.concurrent.BlockingDeque; + +public class TensorDesc extends IEWrapper { + + public TensorDesc(long addr){ + super(addr); + } + + public TensorDesc(Precision precision, int[] dims, Layout layout) { + super(GetTensorDesc(precision.getValue(), dims, layout.getValue())); + } + + public int[] getDims() { + return GetDims(nativeObj); + } + + public Layout getLayout(){ + return Layout.valueOf(getLayout(nativeObj)); + } + + public Precision getPrecision(){ + return Precision.valueOf(getPrecision(nativeObj)); + } + + /*----------------------------------- native methods -----------------------------------*/ + private static native long GetTensorDesc(int precision, int[] dims, int layout); + + private native int[] GetDims(long addr); + + private static native int getLayout(long addr); + + private static native int 
getPrecision(long addr); + + @Override + protected native void delete(long nativeObj); +} diff --git a/inference-engine/ie_bridges/java/org/intel/openvino/WaitMode.java b/inference-engine/ie_bridges/java/org/intel/openvino/WaitMode.java new file mode 100644 index 0000000..8daf17c --- /dev/null +++ b/inference-engine/ie_bridges/java/org/intel/openvino/WaitMode.java @@ -0,0 +1,15 @@ +package org.intel.openvino; + +public enum WaitMode { + RESULT_READY(-1), STATUS_ONLY(0); + + private int value; + + private WaitMode(int value) { + this.value = value; + } + + public int getValue() { + return value; + } +} \ No newline at end of file diff --git a/inference-engine/ie_bridges/java/samples/ArgumentParser.java b/inference-engine/ie_bridges/java/samples/ArgumentParser.java new file mode 100644 index 0000000..9d48f85 --- /dev/null +++ b/inference-engine/ie_bridges/java/samples/ArgumentParser.java @@ -0,0 +1,67 @@ +import java.util.Map; +import java.util.HashMap; + +public class ArgumentParser { + private Map input; + private Map description; + private String help; + + ArgumentParser(String help) { + input = new HashMap<>(); + description = new HashMap<>(); + this.help = help; + } + + public void addArgument(String arg, String help) { + description.put(arg, help); + } + + private void printHelp() { + System.out.println(help); + System.out.println('\n' + "Options:"); + for (Map.Entry entry : description.entrySet()) { + System.out.println(" " + entry.getKey() + " " + entry.getValue()); + } + } + + public void parseArgs(String[] args) { + try{ + for(int i = 0; i < args.length; i++) { + String arg = args[i]; + if (arg.equals("--help") | arg.equals("-h")) { + printHelp(); + System.exit(0); + } else { + if (description.containsKey(arg)) { + input.put(arg, args[++i]); + } else { + System.out.println("Non-existent key: '" + arg + "'"); + System.exit(0); + } + } + } + } catch(ArrayIndexOutOfBoundsException e) { + System.out.println("Error: Incorrect number of arguments"); + 
System.exit(0); + } + } + + private String get(String flag) { + return input.get(flag); + } + + public String get(String flag, String defaultValue) { + String res = input.get(flag); + return (res != null) ? res : defaultValue; + } + + public int getInteger(String flag, int defaultValue) { + String res = get(flag); + return (res != null) ? Integer.parseInt(res) : defaultValue; + } + + public boolean getBoolean(String flag, boolean defaultValue) { + String res = get(flag); + return (res != null) ? Boolean.parseBoolean(res) : defaultValue; + } +} diff --git a/inference-engine/ie_bridges/java/samples/README.md b/inference-engine/ie_bridges/java/samples/README.md new file mode 100644 index 0000000..d1f2762 --- /dev/null +++ b/inference-engine/ie_bridges/java/samples/README.md @@ -0,0 +1,99 @@ +# Benchmark Application + +This guide describes how to run the benchmark applications. + +## How It Works + +Upon start-up, the application reads command-line parameters and loads a network to the Inference Engine plugin, which is chosen depending on a specified device. The number of infer requests and execution approach depend on the mode defined with the `-api` command-line parameter. + +## Build +Create an environment variable with Inference Engine installation path: +export IE_PATH=/path/to/openvino/bin/intel64/Release/lib +``` + +To create java library and java samples for Inference Engine add `-DENABLE_JAVA=ON` flag in cmake command while building dldt: +```bash +cd /openvino/build +cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_JAVA=ON -DENABLE_SAMPLES=ON .. 
+make +``` + +## Running +Add library path for openvino java library before running: +```bash +export LD_LIBRARY_PATH=${IE_PATH}:$LD_LIBRARY_PATH +``` + +To get ```benchmark_app``` help use: +```bash +java -cp ".:${IE_PATH}/inference_engine_java_api.jar:${IE_PATH}/benchmark_app.jar" Main --help +``` + +To run ```benchmark_app``` use: +```bash +java -cp ".:${IE_PATH}/inference_engine_java_api.jar:${IE_PATH}/benchmark_app.jar" Main -m /path/to/model +``` + +## Application Output + +The application outputs the number of executed iterations, total duration of execution, latency, and throughput. + +Below is a fragment of application output for CPU device: + +``` +[Step 10/11] Measuring performance (Start inference asyncronously, 4 inference requests using 4 streams for CPU, limits: 60000 ms duration) +[Step 11/11] Dumping statistics report +Count: 8904 iterations +Duration: 60045.87 ms +Latency: 27.03 ms +Throughput: 148.29 FPS +``` + +# Face Detection Java Samples + +## How It Works + +Upon start-up, the sample application reads command line parameters and loads a network and an image to the Inference +Engine device. When inference is done, the application creates an output image/video. + +To download the model (.bin and .xml files must be downloaded) use: +https://download.01.org/opencv/2019/open_model_zoo/R1/models_bin/face-detection-adas-0001/FP32/ + +## Build and run + +Build and run steps are similar to ```benchmark_app```, but you need to add the OpenCV path. 
+ +### Build +Add an environment variable with OpenCV installation or build path: +```bash +export OpenCV_DIR=/path/to/opencv/ +``` + +### Running +Add library paths for the OpenCV library and the OpenVINO Java library before running: + +* For OpenCV installation path +```bash +export LD_LIBRARY_PATH=${OpenCV_DIR}/share/java/opencv4/:${IE_PATH}:$LD_LIBRARY_PATH +``` +To run the sample use: +```bash +java -cp ".:${OpenCV_DIR}/share/java/opencv4/*:${IE_PATH}/inference_engine_java_api.jar:${IE_PATH}/sample_name.jar" Main -m /path/to/model -i /path/to/image +``` + +* For OpenCV build path +```bash +export LD_LIBRARY_PATH=${OpenCV_DIR}/lib:${IE_PATH}:$LD_LIBRARY_PATH +``` +To run the sample use: +```bash +java -cp ".:${OpenCV_DIR}/bin/*:${IE_PATH}/inference_engine_java_api.jar:${IE_PATH}/sample_name.jar" Main -m /path/to/model -i /path/to/image +``` + +## Sample Output + +### For ```face_detection_java_sample``` +The application will show the image with detected objects enclosed in rectangles in a new window. It outputs the confidence value and the coordinates of the rectangle to the standard output stream. + +### For ```face_detection_sample_async``` +The application will show the video with detected objects enclosed in rectangles in a new window. 
diff --git a/inference-engine/ie_bridges/java/samples/benchmark_app/InferReqWrap.java b/inference-engine/ie_bridges/java/samples/benchmark_app/InferReqWrap.java new file mode 100644 index 0000000..4e73bb0 --- /dev/null +++ b/inference-engine/ie_bridges/java/samples/benchmark_app/InferReqWrap.java @@ -0,0 +1,54 @@ +import java.util.Map; + +import org.intel.openvino.*; + +public class InferReqWrap { + + public InferReqWrap(ExecutableNetwork net, int id, InferRequestsQueue irQueue) { + request = net.CreateInferRequest(); + this.id = id; + this.irQueue = irQueue; + request.SetCompletionCallback(new Runnable() { + + @Override + public void run() { + endTime = System.nanoTime(); + irQueue.putIdleRequest(id, getExecutionTimeInMilliseconds()); + } + }); + } + + void startAsync() { + startTime = System.nanoTime(); + request.StartAsync(); + } + + void _wait() { + request.Wait(WaitMode.RESULT_READY); + } + + void infer() { + startTime = System.nanoTime(); + request.Infer(); + endTime = System.nanoTime(); + irQueue.putIdleRequest(id, getExecutionTimeInMilliseconds()); + } + + Map getPerformanceCounts() { + return request.GetPerformanceCounts(); + } + + Blob getBlob(String name) { + return request.GetBlob(name); + } + + double getExecutionTimeInMilliseconds() { + return (double)(endTime - startTime) * 1e-6; + } + + InferRequest request; + private InferRequestsQueue irQueue; + private long startTime; + private long endTime; + int id; +} diff --git a/inference-engine/ie_bridges/java/samples/benchmark_app/InferRequestsQueue.java b/inference-engine/ie_bridges/java/samples/benchmark_app/InferRequestsQueue.java new file mode 100644 index 0000000..5b531e9 --- /dev/null +++ b/inference-engine/ie_bridges/java/samples/benchmark_app/InferRequestsQueue.java @@ -0,0 +1,70 @@ +import java.util.Vector; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; + +import org.intel.openvino.*; + +public class InferRequestsQueue { + public 
InferRequestsQueue(ExecutableNetwork net, int nireq) { + for (int id = 0; id < nireq; id++) { + requests.add(new InferReqWrap(net, id, this)); + idleIds.add(id); + } + resetTimes(); + } + + void resetTimes() { + startTime = Long.MAX_VALUE; + endTime = Long.MIN_VALUE; + latencies.clear(); + } + + double getDurationInMilliseconds() { + return (double)(endTime - startTime) * 1e-6; + } + + void putIdleRequest(int id, double latency) { + latencies.add(latency); + idleIds.add(id); + endTime = Math.max(System.nanoTime(), endTime); + + synchronized (foo) { + foo.notify(); + } + } + + InferReqWrap getIdleRequest() { + try { + InferReqWrap request = requests.get(idleIds.take()); + startTime = Math.min(System.nanoTime(), startTime); + return request; + } catch (InterruptedException e) { + System.out.println(e.getMessage()); + } + return null; + } + + void waitAll() { + synchronized (foo) { + try { + while(idleIds.size() != requests.size()) { + foo.wait(); + } + } catch (InterruptedException e) { + System.out.println("InterruptedException: " + e.getMessage()); + } + } + } + + Vector getLatencies() { + return latencies; + } + + Vector requests = new Vector(); + private BlockingQueue idleIds = new LinkedBlockingQueue(); + private long startTime; + private long endTime; + Vector latencies = new Vector(); + + Object foo = new Object(); +} diff --git a/inference-engine/ie_bridges/java/samples/benchmark_app/Main.java b/inference-engine/ie_bridges/java/samples/benchmark_app/Main.java new file mode 100644 index 0000000..e0eefee --- /dev/null +++ b/inference-engine/ie_bridges/java/samples/benchmark_app/Main.java @@ -0,0 +1,394 @@ +import java.util.Map; +import java.util.Vector; + +import javax.management.RuntimeErrorException; + +import java.util.Random; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.ArrayList; + +import java.util.Arrays; + +import org.intel.openvino.*; + +public class Main { + + static boolean adjustShapesBatch(Map shapes, int batchSize, 
Map inputInfo) { + boolean updated = false; + + for (Map.Entry entry : inputInfo.entrySet()) { + Layout layout = entry.getValue().getTensorDesc().getLayout(); + int batchIndex = -1; + if ((layout == Layout.NCHW) || (layout == Layout.NCDHW) || + (layout == Layout.NHWC) || (layout == Layout.NDHWC) || + (layout == Layout.NC)) { + batchIndex = 0; + } else if (layout == Layout.CN) { + batchIndex = 1; + } + if ((batchIndex != -1) && (shapes.get(entry.getKey())[batchIndex] != batchSize)) { + shapes.get(entry.getKey())[batchIndex] = batchSize; + updated = true; + } + } + return updated; + } + + static String setThroughputStreams(IECore core, Map device_config, String device, int nstreams, boolean isAsync) { + String key = device + "_THROUGHPUT_STREAMS"; + if (nstreams > 0) { + device_config.put(key, Integer.toString(nstreams)); + } else if (!device_config.containsKey(key) && isAsync) { + System.err.println("[ WARNING ] -nstreams default value is determined automatically for " + device + " device. 
" + + "Although the automatic selection usually provides a reasonable performance," + + "but it still may be non-optimal for some cases, for more information look at README."); + device_config.put(key, device + "_THROUGHPUT_AUTO"); + } + return device_config.get(key); + }; + + static void fillBlobs(Vector requests, Map inputsInfo) { + for (Map.Entry entry : inputsInfo.entrySet()) { + String inputName = entry.getKey(); + TensorDesc tDesc = entry.getValue().getTensorDesc(); + + System.err.print("[ INFO ] Network input '" + inputName + "' precision " + tDesc.getPrecision() + + ", dimensions (" + tDesc.getLayout() + "): "); + for (int dim : tDesc.getDims()) + System.err.print(dim + " "); + System.err.println(); + } + + for (int i = 0; i < requests.size(); i++) { + InferRequest request = requests.get(i).request; + for (Map.Entry entry : inputsInfo.entrySet()) { + String inputName = entry.getKey(); + TensorDesc tDesc = entry.getValue().getTensorDesc(); + request.SetBlob(inputName, blobRandomByte(tDesc)); + } + } + } + + static Blob blobRandomByte(TensorDesc tDesc) { + int dims[] = tDesc.getDims(); + + int size = 1; + for(int i = 0; i < dims.length; i++) { + size *= dims[i]; + } + + byte[] buff = new byte[size]; + Random rand = new Random(); + rand.nextBytes(buff); + + return new Blob(tDesc, buff); + } + + static double getMedianValue(Vector vec) { + Object[] objArr = vec.toArray(); + Double[] arr = Arrays.copyOf(objArr, objArr.length, Double[].class); + + Arrays.sort(arr); + + if (arr.length % 2 == 0) + return ((double)arr[arr.length / 2] + (double)arr[arr.length / 2 - 1]) / 2; + else + return (double)arr[arr.length / 2]; + } + + static boolean getApiBoolean(String api) throws RuntimeException { + if(api.equals("sync")) + return false; + else if(api.equals("async")) + return true; + else throw new RuntimeException("Incorrect argument: '-api'"); + } + + static int step = 0; + static void nextStep(String stepInfo) { + step += 1; + System.out.println("[Step " + step + "/11] 
" + stepInfo); + } + + static int deviceDefaultDeviceDurationInSeconds(String device) { + final Map deviceDefaultDurationInSeconds = new HashMap() {{ + put("CPU", 60 ); + put("GPU", 60 ); + put("VPU", 60 ); + put("MYRIAD", 60 ); + put("HDDL", 60 ); + put("FPGA", 120); + put("UNKNOWN", 120); + }}; + + Integer duration = deviceDefaultDurationInSeconds.get(device); + + if (duration == null) { + duration = deviceDefaultDurationInSeconds.get("UNKNOWN"); + System.err.println("[ WARNING ] Default duration " + duration + " seconds for unknown device '" + device + "' is used"); + } + return duration; + } + + static long getTotalMsTime(long startTimeMilliSec) { + return (System.currentTimeMillis() - startTimeMilliSec); + }; + + static long getDurationInMilliseconds(int seconds) { + return seconds * 1000L; + } + + public static void main(String[] args) { + try { + System.loadLibrary(IECore.NATIVE_LIBRARY_NAME); + } catch (UnsatisfiedLinkError e) { + System.err.println("Failed to load Inference Engine library\n" + e); + System.exit(1); + } + + // ----------------- 1. 
Parsing and validating input arguments --------------------------------------------- + nextStep("Parsing and validating input arguments"); + + ArgumentParser parser = new ArgumentParser("This is benchmarking application"); + parser.addArgument("-m", "path to model .xml"); + parser.addArgument("-d", "device"); + parser.addArgument("-nireq", "number of infer requests"); + parser.addArgument("-niter", "number of iterations"); + parser.addArgument("-b", "batch size"); + parser.addArgument("-nthreads", "number of threads"); + parser.addArgument("-nstreams", "number of streams"); + parser.addArgument("-t", "time limit"); + parser.addArgument("-api", "sync or async"); + parser.parseArgs(args); + + String xmlPath = parser.get("-m", null); + String device = parser.get("-d", "CPU"); + int nireq = parser.getInteger("-nireq", 0); + int niter = parser.getInteger("-niter", 0); + int batchSize = parser.getInteger("-b", 0); + int nthreads = parser.getInteger("-nthreads", 0); + int nstreams = parser.getInteger("-nstreams", 0); + int timeLimit = parser.getInteger("-t",0); + String api = parser.get("-api", "async"); + boolean isAsync; + + try{ + isAsync = getApiBoolean(api); + } catch(RuntimeException e) { + System.out.println(e.getMessage()); + return; + } + + if(xmlPath == null) { + System.out.println("Error: Missed argument: -m"); + return; + } + + // ----------------- 2. Loading the Inference Engine -------------------------------------------------------- + nextStep("Loading the Inference Engine"); + + IECore core = new IECore(); + + // ----------------- 3. 
Setting device configuration -------------------------------------------------------- + nextStep("Setting device configuration"); + + Map device_config = new HashMap<>(); + + if (device.equals("CPU")) { // CPU supports few special performance-oriented keys + // limit threading for CPU portion of inference + if (nthreads > 0) + device_config.put("CPU_THREADS_NUM", Integer.toString(nthreads)); + + if (!device_config.containsKey("CPU_BIND_THREAD")) { + device_config.put("CPU_BIND_THREAD", "YES"); + } + + // for CPU execution, more throughput-oriented execution via streams + setThroughputStreams(core, device_config, device, nstreams, isAsync); + } else if (device.equals("GPU")) { + // for GPU execution, more throughput-oriented execution via streams + setThroughputStreams(core, device_config, device, nstreams, isAsync); + } else if (device.equals("MYRIAD")) { + device_config.put("LOG_LEVEL", "LOG_WARNING"); + } else if (device.equals("GNA")) { + device_config.put("GNA_PRECISION", "I16"); + + if (nthreads > 0) + device_config.put("GNA_LIB_N_THREADS", Integer.toString(nthreads)); + } + + core.SetConfig(device_config, device); + + // ----------------- 4. Reading the Intermediate Representation network ------------------------------------- + nextStep("Reading the Intermediate Representation network"); + + long startTime = System.currentTimeMillis(); + CNNNetwork net = core.ReadNetwork(xmlPath); + long durationMs = getTotalMsTime(startTime); + + System.err.println("[ INFO ] Read network took " + durationMs + " ms"); + + Map inputsInfo = net.getInputsInfo(); + String inputName = new ArrayList(inputsInfo.keySet()).get(0); + InputInfo inputInfo = inputsInfo.get(inputName); + + // ----------------- 5. 
Resizing network to match image sizes and given batch ------------------------------- + nextStep("Resizing network to match image sizes and given batch"); + + int inputBatchSize = batchSize; + batchSize = net.getBatchSize(); + + Map shapes = net.getInputShapes(); + + if ((inputBatchSize != 0) && (batchSize != inputBatchSize)) { + adjustShapesBatch(shapes, batchSize, inputsInfo); + + startTime = System.currentTimeMillis(); + net.reshape(shapes); + durationMs = getTotalMsTime(startTime); + batchSize = net.getBatchSize(); + + System.err.println("[ INFO ] Reshape network took " + durationMs + " ms"); + } + + System.err.println((inputBatchSize != 0 ? "[ INFO ] Network batch size was changed to: " : "[ INFO ] Network batch size: ") + batchSize); + + // ----------------- 6. Configuring input ------------------------------------------------------------------- + nextStep("Configuring input"); + + inputInfo.getPreProcess().setResizeAlgorithm(ResizeAlgorithm.RESIZE_BILINEAR); + inputInfo.setPrecision(Precision.U8); + + // ----------------- 7. Loading the model to the device ----------------------------------------------------- + nextStep("Loading the model to the device"); + + startTime = System.currentTimeMillis(); + ExecutableNetwork executableNetwork = core.LoadNetwork(net, device); + durationMs = getTotalMsTime(startTime); + + System.err.println("[ INFO ] Load network took " + durationMs + " ms"); + + // ----------------- 8. 
Setting optimal runtime parameters -------------------------------------------------- + nextStep("Setting optimal runtime parameters"); + + // Update number of streams + nstreams = Integer.parseInt(core.GetConfig(device, device + "_THROUGHPUT_STREAMS").asString()); + + // Number of requests + if (nireq == 0) { + if (!isAsync) { + nireq = 1; + } else { + String key = "OPTIMAL_NUMBER_OF_INFER_REQUESTS"; + nireq = executableNetwork.GetMetric(key).asInt(); + } + } + + if ((niter > 0) && isAsync) { + int temp = niter; + niter = ((niter + nireq - 1) / nireq) * nireq; + if (temp != niter) { + System.err.println("[ INFO ] Number of iterations was aligned by request number from " + + temp + " to " + niter + " using number of requests " + nireq); + } + } + + // Time limit + int durationSeconds = 0; + if (timeLimit != 0) { + // time limit + durationSeconds = timeLimit; + } else if (niter == 0) { + // default time limit + durationSeconds = deviceDefaultDeviceDurationInSeconds(device); + } + durationMs = getDurationInMilliseconds(durationSeconds); + + // ----------------- 9. Creating infer requests and filling input blobs ------------------------------------- + nextStep("Creating infer requests and filling input blobs"); + + InferRequestsQueue inferRequestsQueue = new InferRequestsQueue(executableNetwork, nireq); + fillBlobs(inferRequestsQueue.requests, inputsInfo); + + // ----------------- 10. 
Measuring performance -------------------------------------------------------------- + String ss = "Start inference " + api + "ronously"; + if (isAsync) { + if (!ss.isEmpty()) { + ss += ", "; + } + ss = ss + nireq + " inference requests using " + nstreams + " streams for " + device; + } + ss += ", limits: "; + if (durationSeconds > 0) { + ss += durationMs + " ms duration"; + } + if (niter != 0) { + if (durationSeconds > 0) { + ss += ", "; + } + ss = ss + niter + " iterations"; + } + + nextStep("Measuring performance (" + ss + ")"); + + int iteration = 0; + InferReqWrap inferRequest = null; + + inferRequest = inferRequestsQueue.getIdleRequest(); + if (inferRequest == null) { + System.out.println("No idle Infer Requests!"); + return; + } + + if (isAsync) { + inferRequest.startAsync(); + } else { + inferRequest.infer(); + } + + inferRequestsQueue.waitAll(); + inferRequestsQueue.resetTimes(); + + startTime = System.currentTimeMillis(); + long execTime = getTotalMsTime(startTime); + + while ((niter != 0 && iteration < niter) || + (durationMs != 0L && execTime < durationMs) || + (isAsync && iteration % nireq != 0)) { + inferRequest = inferRequestsQueue.getIdleRequest(); + + if (isAsync) { + // As the inference request is currently idle, the wait() adds no additional overhead + //(and should return immediately). + // The primary reason for calling the method is exception checking/re-throwing. + // Callback, that governs the actual execution can handle errors as well, + // but as it uses just error codes it has no details like ‘what()’ method of `std::exception` + // So, rechecking for any exceptions here. + inferRequest._wait(); + inferRequest.startAsync(); + + } else { + inferRequest.infer(); + } + + iteration++; + execTime = getTotalMsTime(startTime); + } + + inferRequestsQueue.waitAll(); + + double latency = getMedianValue(inferRequestsQueue.getLatencies()); + double totalDuration = inferRequestsQueue.getDurationInMilliseconds(); + double fps = (!isAsync) ? 
batchSize * 1000.0 / latency : + batchSize * 1000.0 * iteration / totalDuration; + + // ----------------- 11. Dumping statistics report ---------------------------------------------------------- + nextStep("Dumping statistics report"); + + System.out.println("Count: " + iteration + " iterations"); + System.out.println("Duration: " + String.format("%.2f", totalDuration) + " ms"); + System.out.println("Latency: " + String.format("%.2f", latency) + " ms"); + System.out.println("Throughput: " + String.format("%.2f", fps) + " FPS"); + } +} diff --git a/inference-engine/ie_bridges/java/samples/face_detection_java_sample/Main.java b/inference-engine/ie_bridges/java/samples/face_detection_java_sample/Main.java new file mode 100644 index 0000000..222b1dd --- /dev/null +++ b/inference-engine/ie_bridges/java/samples/face_detection_java_sample/Main.java @@ -0,0 +1,121 @@ +import org.opencv.core.*; +import org.opencv.imgcodecs.*; +import org.opencv.highgui.HighGui; +import org.opencv.imgproc.Imgproc; +import org.intel.openvino.*; + +import java.util.Map; +import java.util.Set; +import java.util.ArrayList; + +/* +This is a face detection Java sample. + +Upon start-up the sample application reads command line parameters and loads a network +and an image to the Inference Engine device. When inference is done, the application will show +the image with detected objects enclosed in rectangles in a new window. It also outputs the +confidence value and the coordinates of the rectangle to the standard output stream. + +To get the list of command line parameters run the application with the `--help` parameter. 
+*/ +public class Main { + public static void main(String[] args) { + final double THRESHOLD = 0.7; + try { + System.loadLibrary(Core.NATIVE_LIBRARY_NAME); + } catch (UnsatisfiedLinkError e) { + System.err.println("Failed to load OpenCV library\n" + e); + System.exit(1); + } + try { + System.loadLibrary(IECore.NATIVE_LIBRARY_NAME); + } catch (UnsatisfiedLinkError e) { + System.err.println("Failed to load Inference Engine library\n" + e); + System.exit(1); + } + + ArgumentParser parser = new ArgumentParser("This is face detection sample"); + parser.addArgument("-i", "path to image"); + parser.addArgument("-m", "path to model .xml"); + parser.parseArgs(args); + + String imgPath = parser.get("-i", null); + String xmlPath = parser.get("-m", null); + + if(imgPath == null) { + System.out.println("Error: Missed argument: -i"); + return; + } + if(xmlPath == null) { + System.out.println("Error: Missed argument: -m"); + return; + } + + Mat image = Imgcodecs.imread(imgPath); + + byte[] buff = new byte[(int) (image.total() * image.channels())]; + image.get(0, 0, buff); + + int[] dimsArr = {1, image.channels(), image.height(), image.width()}; + TensorDesc tDesc = new TensorDesc(Precision.U8, dimsArr, Layout.NHWC); + + Blob imgBlob = new Blob(tDesc, buff); + + IECore core = new IECore(); + + CNNNetwork net = core.ReadNetwork(xmlPath); + + Map inputsInfo = net.getInputsInfo(); + String inputName = new ArrayList(inputsInfo.keySet()).get(0); + InputInfo inputInfo = inputsInfo.get(inputName); + + inputInfo.getPreProcess().setResizeAlgorithm(ResizeAlgorithm.RESIZE_BILINEAR); + inputInfo.setLayout(Layout.NHWC); + inputInfo.setPrecision(Precision.U8); + + String outputName = new ArrayList(net.getOutputsInfo().keySet()).get(0); + + ExecutableNetwork executableNetwork = core.LoadNetwork(net, "CPU"); + InferRequest inferRequest = executableNetwork.CreateInferRequest(); + + inferRequest.SetBlob(inputName, imgBlob); + inferRequest.Infer(); + + Blob output = inferRequest.GetBlob(outputName); 
+ int dims[] = output.getTensorDesc().getDims(); + int maxProposalCount = dims[2]; + + float detection[] = new float[output.size()]; + output.rmap().get(detection); + + for (int curProposal = 0; curProposal < maxProposalCount; curProposal++) { + int image_id = (int) detection[curProposal * 7]; + if (image_id < 0) + break; + + float confidence = detection[curProposal * 7 + 2]; + + // Drawing only objects with >70% probability + if (confidence < THRESHOLD) + continue; + + int label = (int) (detection[curProposal * 7 + 1]); + int xmin = (int) (detection[curProposal * 7 + 3] * image.cols()); + int ymin = (int) (detection[curProposal * 7 + 4] * image.rows()); + int xmax = (int) (detection[curProposal * 7 + 5] * image.cols()); + int ymax = (int) (detection[curProposal * 7 + 6] * image.rows()); + + System.out.println("[" + curProposal + "," + label + "] element, prob = " + confidence + " (" + xmin + + "," + ymin + ")-(" + xmax + "," + ymax + ")"); + System.out.println(" - WILL BE PRINTED!"); + + // Draw rectangle around detected object. 
+ Imgproc.rectangle(image, new Point(xmin, ymin), new Point(xmax, ymax), new Scalar(0, 255, 0)); + } + + HighGui.namedWindow("Detection", HighGui.WINDOW_AUTOSIZE); + HighGui.imshow("Detection", image); + HighGui.waitKey(0); + HighGui.destroyAllWindows(); + } +} diff --git a/inference-engine/ie_bridges/java/samples/face_detection_sample_async/Main.java b/inference-engine/ie_bridges/java/samples/face_detection_sample_async/Main.java new file mode 100644 index 0000000..a29e1d6 --- /dev/null +++ b/inference-engine/ie_bridges/java/samples/face_detection_sample_async/Main.java @@ -0,0 +1,264 @@ +import org.opencv.core.*; +import org.opencv.imgcodecs.*; +import org.opencv.videoio.*; +import org.opencv.imgproc.Imgproc; +import org.opencv.highgui.HighGui; + +import java.util.LinkedList; +import java.util.Vector; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.Map; +import java.util.Queue; +import java.util.ArrayList; +import java.util.HashMap; + +import org.intel.openvino.*; + +/* +This is an async face detection Java sample. + +Upon start-up the sample application reads command line parameters and loads a network +and images to the Inference Engine device. When inference is done, the application +shows the video with detected objects enclosed in rectangles in a new window. + +To get the list of command line parameters run the application with the `--help` parameter. 
+*/ +public class Main { + + public static Blob imageToBlob(Mat image) { + if (buff == null) + buff = new byte[(int) (image.total() * image.channels())]; + + image.get(0, 0, buff); + + int[] dimsArr = {1, image.channels(), image.height(), image.width()}; + TensorDesc tDesc = new TensorDesc(Precision.U8, dimsArr, Layout.NHWC); + + return new Blob(tDesc, buff); + } + + static void processInferRequets(WaitMode wait) { + int size = 0; + float[] res = null; + + while (!startedRequestsIds.isEmpty()) { + int requestId = startedRequestsIds.peek(); + InferRequest inferRequest = inferRequests.get(requestId); + + if (inferRequest.Wait(wait) != StatusCode.OK) + return; + + if (size == 0 && res == null) { + size = inferRequest.GetBlob(outputName).size(); + res = new float[size]; + } + + inferRequest.GetBlob(outputName).rmap().get(res); + detectionOutput.add(res); + + resultCounter++; + + asyncInferIsFree.setElementAt(true, requestId); + startedRequestsIds.remove(); + } + } + + public static void main(String[] args) { + try { + System.loadLibrary(Core.NATIVE_LIBRARY_NAME); + } catch (UnsatisfiedLinkError e) { + System.err.println("Failed to load OpenCV library\n" + e); + System.exit(1); + } + try { + System.loadLibrary(IECore.NATIVE_LIBRARY_NAME); + } catch (UnsatisfiedLinkError e) { + System.err.println("Failed to load Inference Engine library\n" + e); + System.exit(1); + } + + ArgumentParser parser = new ArgumentParser("This is async face detection sample"); + parser.addArgument("-i", "path to video"); + parser.addArgument("-m", "path to model .xml"); + parser.addArgument("-d", "device"); + parser.addArgument("-nireq", "number of infer requests"); + parser.parseArgs(args); + + String imgsPath = parser.get("-i", null); + String xmlPath = parser.get("-m", null); + String device = parser.get("-d", "CPU"); + int inferRequestsSize = parser.getInteger("-nireq", 2); + + if(imgsPath == null ) { + System.out.println("Error: Missed argument: -i"); + return; + } + if(xmlPath == null) { + 
System.out.println("Error: Missed argument: -m"); + return; + } + + int warmupNum = inferRequestsSize * 2; + + BlockingQueue framesQueue = new LinkedBlockingQueue(); + + Thread captureThread = new Thread(new Runnable() { + @Override + public void run() { + VideoCapture cam = new VideoCapture(); + cam.open(imgsPath); + Mat frame = new Mat(); + + while (cam.read(frame)) { + framesCounter++; + framesQueue.add(frame.clone()); + } + } + }); + + Thread inferThread = new Thread(new Runnable() { + + @Override + public void run() { + try { + IECore core = new IECore(); + CNNNetwork net = core.ReadNetwork(xmlPath); + + Map inputsInfo = net.getInputsInfo(); + String inputName = new ArrayList(inputsInfo.keySet()).get(0); + InputInfo inputInfo = inputsInfo.get(inputName); + + inputInfo.getPreProcess().setResizeAlgorithm(ResizeAlgorithm.RESIZE_BILINEAR); + inputInfo.setLayout(Layout.NHWC); + inputInfo.setPrecision(Precision.U8); + + outputName = new ArrayList(net.getOutputsInfo().keySet()).get(0); + + ExecutableNetwork executableNetwork = core.LoadNetwork(net, device); + + asyncInferIsFree = new Vector(inferRequestsSize); + + for (int i = 0; i < inferRequestsSize; i++) { + inferRequests.add(executableNetwork.CreateInferRequest()); + asyncInferIsFree.add(true); + } + + boolean isRunning = true; + + while (captureThread.isAlive() || !framesQueue.isEmpty()) { + processInferRequets(WaitMode.STATUS_ONLY); + + for (int i = 0; i < inferRequestsSize; i++) { + if (!asyncInferIsFree.get(i)) + continue; + + Mat frame = framesQueue.poll(0, TimeUnit.SECONDS); + + if (frame == null) + break; + + InferRequest request = inferRequests.get(i); + + asyncInferIsFree.setElementAt(false, i); + processedFramesQueue.add(frame); // processedFramesQueue is consumed by the rendering loop + + Blob imgBlob = imageToBlob(frame); + request.SetBlob(inputName, imgBlob); + + startedRequestsIds.add(i); + request.StartAsync(); + } + } + processInferRequets(WaitMode.RESULT_READY); + } catch (InterruptedException e) { + 
e.printStackTrace(); + + for (Thread t : Thread.getAllStackTraces().keySet()) + if (t.getState()==Thread.State.RUNNABLE) + t.interrupt(); + } + } + }); + + captureThread.start(); + inferThread.start(); + + TickMeter tm = new TickMeter(); + try { + while (inferThread.isAlive() || !detectionOutput.isEmpty()) { + + float[] detection = detectionOutput.poll(waitingTime, TimeUnit.SECONDS); + if (detection == null) + continue; + + Mat img = processedFramesQueue.poll(waitingTime, TimeUnit.SECONDS); + int maxProposalCount = detection.length / 7; + + for (int curProposal = 0; curProposal < maxProposalCount; curProposal++) { + int imageId = (int) detection[curProposal * 7]; + if (imageId < 0) + break; + + float confidence = detection[curProposal * 7 + 2]; + + // Drawing only objects with >70% probability + if (confidence < CONFIDENCE_THRESHOLD) + continue; + + int label = (int) (detection[curProposal * 7 + 1]); + int xmin = (int) (detection[curProposal * 7 + 3] * img.cols()); + int ymin = (int) (detection[curProposal * 7 + 4] * img.rows()); + int xmax = (int) (detection[curProposal * 7 + 5] * img.cols()); + int ymax = (int) (detection[curProposal * 7 + 6] * img.rows()); + + // Draw rectangle around detected object. 
+ Imgproc.rectangle(img, new Point(xmin, ymin), new Point(xmax, ymax), new Scalar(0, 255, 0), 2); + } + + if (resultCounter == warmupNum) { + tm.start(); + } else if (resultCounter > warmupNum) { + tm.stop(); + double worksFps = ((double)(resultCounter - warmupNum)) / tm.getTimeSec(); + double readFps = ((double)(framesCounter - warmupNum)) / tm.getTimeSec(); + tm.start(); + + Imgproc.putText(img, "Reading fps: " + String.format("%.3f", readFps), new Point(10, 50), 0 , 0.7, new Scalar(0, 255, 0), 1); + Imgproc.putText(img, "Inference fps: " + String.format("%.3f", worksFps), new Point(10, 80), 0 , 0.7, new Scalar(0, 255, 0), 1); + } + + HighGui.imshow("Detection", img); + } + + captureThread.join(); + inferThread.join(); + + HighGui.waitKey(0); + HighGui.destroyAllWindows(); + + } catch (InterruptedException e) { + e.printStackTrace(); + for (Thread t : Thread.getAllStackTraces().keySet()) + if (t.getState()==Thread.State.RUNNABLE) + t.interrupt(); + } + } + + static final float CONFIDENCE_THRESHOLD = 0.7f; + static int waitingTime = 1; + + static BlockingQueue processedFramesQueue = new LinkedBlockingQueue(); + static BlockingQueue detectionOutput = new LinkedBlockingQueue(); + + static String outputName; + static Queue startedRequestsIds = new LinkedList(); + static Vector inferRequests = new Vector(); + static Vector asyncInferIsFree; + + static byte[] buff = null; + + static int framesCounter = 0; + static int resultCounter = 0; +} diff --git a/inference-engine/ie_bridges/java/tests/BlobTests.java b/inference-engine/ie_bridges/java/tests/BlobTests.java new file mode 100644 index 0000000..f9d5f93 --- /dev/null +++ b/inference-engine/ie_bridges/java/tests/BlobTests.java @@ -0,0 +1,30 @@ +import org.junit.Assert; + +import org.intel.openvino.*; + +public class BlobTests extends IETest { + + public void testGetBlob(){ + int[] dimsArr = {1, 3, 200, 200}; + TensorDesc tDesc = new TensorDesc(Precision.U8, dimsArr, Layout.NHWC); + + Blob blob = new Blob(tDesc); + + 
Assert.assertArrayEquals(blob.getTensorDesc().getDims(), dimsArr); + } + + public void testGetBlobFromFloat(){ + int[] dimsArr = {1, 1, 2, 2}; + TensorDesc tDesc = new TensorDesc(Precision.FP32, dimsArr, Layout.NHWC); + + float[] data = {0.0f, 1.1f, 2.2f, 3.3f}; + + Blob blob = new Blob(tDesc, data); + + float detection[] = new float[blob.size()]; + blob.rmap().get(detection); + + Assert.assertArrayEquals(blob.getTensorDesc().getDims(), dimsArr); + Assert.assertArrayEquals(data, detection, 0.0f); + } +} diff --git a/inference-engine/ie_bridges/java/tests/CNNNetworkTests.java b/inference-engine/ie_bridges/java/tests/CNNNetworkTests.java new file mode 100644 index 0000000..8558e08 --- /dev/null +++ b/inference-engine/ie_bridges/java/tests/CNNNetworkTests.java @@ -0,0 +1,45 @@ +import org.junit.Assert; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; + +import org.intel.openvino.*; + +public class CNNNetworkTests extends IETest { + IECore core = new IECore(); + + public void testInputName() { + CNNNetwork net = core.ReadNetwork(modelXml); + Map inputsInfo = net.getInputsInfo(); + String inputName = new ArrayList(inputsInfo.keySet()).get(0); + + assertEquals("Input name", "data", inputName); + } + + public void testReshape() { + CNNNetwork net = core.ReadNetwork(modelXml); + + Map input = new HashMap<>(); + int[] val = {1, 3, 34, 34}; + input.put("data", val); + + net.reshape(input); + Map res = net.getInputShapes(); + + Assert.assertArrayEquals(input.get("data"), res.get("data")); + } + + public void testAddOutput() { + CNNNetwork net = core.ReadNetwork(modelXml); + Map output = net.getOutputsInfo(); + + assertEquals("Input size", 1, output.size()); + + net.addOutput("19/WithoutBiases"); + output = net.getOutputsInfo(); + + assertEquals("Input size", 2, output.size()); + } + +} diff --git a/inference-engine/ie_bridges/java/tests/IECoreTests.java b/inference-engine/ie_bridges/java/tests/IECoreTests.java new file mode 100644 index 
0000000..a101d00 --- /dev/null +++ b/inference-engine/ie_bridges/java/tests/IECoreTests.java @@ -0,0 +1,78 @@ +import org.intel.openvino.*; + +import java.util.Map; +import java.util.HashMap; + +public class IECoreTests extends IETest { + IECore core; + String exceptionMessage; + + @Override + protected void setUp() { + core = new IECore(); + exceptionMessage = ""; + } + + public void testInitIECore(){ + assertTrue(core instanceof IECore); + } + + public void testReadNetwork(){ + CNNNetwork net = core.ReadNetwork(modelXml, modelBin); + assertEquals("Network name", "test_model", net.getName()); + } + + public void testReadNetworkXmlOnly(){ + CNNNetwork net = core.ReadNetwork(modelXml); + assertEquals("Batch size", 1, net.getBatchSize()); + } + + public void testReadNetworkIncorrectXmlPath(){ + try{ + CNNNetwork net = core.ReadNetwork("model.xml", modelBin); + } catch (Exception e){ + exceptionMessage = e.getMessage(); + } + assertTrue(exceptionMessage.contains("Model file model.xml cannot be opened!")); + } + + public void testReadNetworkIncorrectBinPath(){ + try{ + CNNNetwork net = core.ReadNetwork(modelXml, "model.bin"); + } catch (Exception e){ + exceptionMessage = e.getMessage(); + } + assertTrue(exceptionMessage.contains("Weights file model.bin cannot be opened!")); + } + + public void testLoadNetwork(){ + CNNNetwork net = core.ReadNetwork(modelXml, modelBin); + ExecutableNetwork executableNetwork = core.LoadNetwork(net, device); + + assertTrue(executableNetwork instanceof ExecutableNetwork); + } + + public void testLoadNetworDeviceConfig(){ + CNNNetwork net = core.ReadNetwork(modelXml, modelBin); + + Map testMap = new HashMap(); + + //When specifying key values as raw strings, omit the KEY_ prefix + testMap.put("CPU_BIND_THREAD", "YES"); + testMap.put("CPU_THREADS_NUM", "1"); + + ExecutableNetwork executableNetwork = core.LoadNetwork(net, device, testMap); + + assertTrue(executableNetwork instanceof ExecutableNetwork); + } + + public void 
testLoadNetworkWrongDevice(){ + CNNNetwork net = core.ReadNetwork(modelXml, modelBin); + try{ + core.LoadNetwork(net, "DEVISE"); + } catch (Exception e){ + exceptionMessage = e.getMessage(); + } + assertTrue(exceptionMessage.contains("Device with \"DEVISE\" name is not registered in the InferenceEngine")); + } +} diff --git a/inference-engine/ie_bridges/java/tests/IETest.java b/inference-engine/ie_bridges/java/tests/IETest.java new file mode 100644 index 0000000..1fe3020 --- /dev/null +++ b/inference-engine/ie_bridges/java/tests/IETest.java @@ -0,0 +1,26 @@ +import junit.framework.TestCase; + +import java.nio.file.Paths; +import java.lang.Class; +import java.util.List; + +import org.intel.openvino.*; + +public class IETest extends TestCase { + String modelXml; + String modelBin; + String device; + + public IETest(){ + try { + System.loadLibrary(IECore.NATIVE_LIBRARY_NAME); + } catch (UnsatisfiedLinkError e) { + System.err.println("Failed to load Inference Engine library\n" + e); + System.exit(1); + } + + modelXml = Paths.get(System.getenv("MODELS_PATH"), "models", "test_model", "test_model_fp32.xml").toString(); + modelBin = Paths.get(System.getenv("MODELS_PATH"), "models", "test_model", "test_model_fp32.bin").toString(); + device = "CPU"; + } +} diff --git a/inference-engine/ie_bridges/java/tests/InferRequestTests.java b/inference-engine/ie_bridges/java/tests/InferRequestTests.java new file mode 100644 index 0000000..dc4113d --- /dev/null +++ b/inference-engine/ie_bridges/java/tests/InferRequestTests.java @@ -0,0 +1,94 @@ +import java.util.Map; +import java.util.Vector; +import java.util.ArrayList; + +import org.intel.openvino.*; +import org.intel.openvino.InferenceEngineProfileInfo.LayerStatus; + +public class InferRequestTests extends IETest { + IECore core; + CNNNetwork net; + ExecutableNetwork executableNetwork; + InferRequest inferRequest; + boolean completionCallback; + + @Override + protected void setUp() { + core = new IECore(); + net = 
core.ReadNetwork(modelXml); + executableNetwork = core.LoadNetwork(net, "CPU"); + inferRequest = executableNetwork.CreateInferRequest(); + completionCallback = false; + } + + public void testGetPerformanceCounts(){ + inferRequest.Infer(); + + Vector layer_name = new Vector<>(); + layer_name.add("19/Fused_Add_"); + layer_name.add("21"); + layer_name.add("22"); + layer_name.add("23"); + layer_name.add("24/Fused_Add_"); + layer_name.add("26"); + layer_name.add("27"); + layer_name.add("29"); + layer_name.add("fc_out"); + layer_name.add("out_fc_out"); + + Vector exec_type = new Vector<>(); + exec_type.add("Convolution"); + exec_type.add("ReLU"); + exec_type.add("Pooling"); + exec_type.add("Convolution"); + exec_type.add("Convolution"); + exec_type.add("ReLU"); + exec_type.add("Pooling"); + exec_type.add("FullyConnected"); + exec_type.add("SoftMax"); + exec_type.add("Output"); + + Map res = inferRequest.GetPerformanceCounts(); + + assertEquals("Map size", layer_name.size(), res.size()); + ArrayList resKeySet = new ArrayList(res.keySet()); + + for (int i = 0; i < res.size(); i++){ + String key = resKeySet.get(i); + InferenceEngineProfileInfo resVal = res.get(key); + + assertEquals(key + " execType", key, layer_name.elementAt(i)); + assertEquals(key + " executionIndex", i, resVal.executionIndex); + assertTrue(resVal.status == InferenceEngineProfileInfo.LayerStatus.EXECUTED + || resVal.status == InferenceEngineProfileInfo.LayerStatus.NOT_RUN); + } + } + + public void testStartAsync() { + inferRequest.StartAsync(); + StatusCode statusCode = inferRequest.Wait(WaitMode.RESULT_READY); + + assertEquals("StartAsync", StatusCode.OK, statusCode); + } + + public void testSetCompletionCallback() { + inferRequest.SetCompletionCallback(new Runnable(){ + + @Override + public void run() { + completionCallback = true; + } + }); + + for(int i = 0; i < 5; i++) { + inferRequest.Wait(WaitMode.RESULT_READY); + inferRequest.StartAsync(); + } + + inferRequest.Wait(WaitMode.RESULT_READY); + 
inferRequest.StartAsync(); + StatusCode statusCode = inferRequest.Wait(WaitMode.RESULT_READY); + + assertEquals("SetCompletionCallback", true, completionCallback); + } +} diff --git a/inference-engine/ie_bridges/java/tests/InputInfoTests.java b/inference-engine/ie_bridges/java/tests/InputInfoTests.java new file mode 100644 index 0000000..f54b5bc --- /dev/null +++ b/inference-engine/ie_bridges/java/tests/InputInfoTests.java @@ -0,0 +1,38 @@ +import java.util.ArrayList; +import java.util.Map; + +import org.intel.openvino.*; + +public class InputInfoTests extends IETest { + IECore core; + CNNNetwork net; + + @Override + protected void setUp() { + core = new IECore(); + } + + public void testSetLayout() { + net = core.ReadNetwork(modelXml); + Map inputsInfo = net.getInputsInfo(); + + String inputName = new ArrayList(inputsInfo.keySet()).get(0); + InputInfo inputInfo = inputsInfo.get(inputName); + assertTrue(inputInfo.getLayout() != Layout.NHWC); + + inputInfo.setLayout(Layout.NHWC); + assertEquals("setLayout", Layout.NHWC, inputInfo.getLayout()); + } + + public void testSetPrecision() { + net = core.ReadNetwork(modelXml); + Map inputsInfo = net.getInputsInfo(); + + String inputName = new ArrayList(inputsInfo.keySet()).get(0); + InputInfo inputInfo = inputsInfo.get(inputName); + inputInfo.setPrecision(Precision.U8); + + assertEquals("setPrecision", Precision.U8, inputInfo.getPrecision()); + } + +} diff --git a/inference-engine/ie_bridges/java/tests/OpenVinoTestRunner.java b/inference-engine/ie_bridges/java/tests/OpenVinoTestRunner.java new file mode 100644 index 0000000..03e75d6 --- /dev/null +++ b/inference-engine/ie_bridges/java/tests/OpenVinoTestRunner.java @@ -0,0 +1,15 @@ +import org.junit.runner.JUnitCore; +import org.junit.runner.Result; +import org.junit.runner.notification.Failure; + +public class OpenVinoTestRunner { + public static void main(String[] args) { + Result result = JUnitCore.runClasses(TestsSuite.class); + + for (Failure failure : 
result.getFailures()) { + System.out.println(failure.toString()); + } + + System.out.println(result.wasSuccessful()); + } +} diff --git a/inference-engine/ie_bridges/java/tests/README.md b/inference-engine/ie_bridges/java/tests/README.md new file mode 100644 index 0000000..1d718a4 --- /dev/null +++ b/inference-engine/ie_bridges/java/tests/README.md @@ -0,0 +1,31 @@ +## Build +Create an environment variable with the Inference Engine installation path: +```bash +export IE_PATH=/path/to/openvino/bin/intel64/Release/lib/ +``` + +To build the Java wrapper tests, add the `-DENABLE_JAVA=ON` and `-DENABLE_TESTS=ON` flags to the cmake command when building openvino: +```bash +cd openvino +mkdir build && cd build +cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_JAVA=ON -DENABLE_TESTS=ON .. +make --jobs=$(nproc --all) +``` + +The models used by the tests are available in the [test data](https://github.com/openvinotoolkit/testdata.git) repository. + +## Running +Create an environment variable with the testdata path: +```bash +export MODELS_PATH=/path/to/testdata +``` + +Add the library path for the openvino Java library before running: +```bash +export LD_LIBRARY_PATH=${IE_PATH}:$LD_LIBRARY_PATH +``` + +To run the tests use: +```bash +java -cp ".:${IE_PATH}/*" OpenVinoTestRunner +``` diff --git a/inference-engine/ie_bridges/java/tests/TestsSuite.java b/inference-engine/ie_bridges/java/tests/TestsSuite.java new file mode 100644 index 0000000..66ca210 --- /dev/null +++ b/inference-engine/ie_bridges/java/tests/TestsSuite.java @@ -0,0 +1,63 @@ +import org.junit.runner.RunWith; +import org.junit.runners.AllTests; + +import junit.framework.TestSuite; + +import java.util.List; +import java.util.ArrayList; +import java.util.zip.*; + +import java.nio.file.FileSystems; +import java.nio.file.Path; +import java.nio.file.Paths; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; + +import java.lang.Class; +import java.net.*; + +import org.intel.openvino.*; + +@RunWith(AllTests.class) + +public 
class TestsSuite extends IETest{ + + public static TestSuite suite() { + TestSuite suite = new TestSuite(); + try { + //get openvino_test.jar path + String dir = new File(TestsSuite.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getPath().toString(); + + List> results = findClasses(dir); + results.forEach(result->suite.addTest(new junit.framework.JUnit4TestAdapter(result))); + + } catch (ClassNotFoundException e) { + System.out.println("ClassNotFoundException: " + e.getMessage()); + } catch (URISyntaxException e){ + System.out.println("URISyntaxException: " + e.getMessage()); + } + return suite; + } + + private static List> findClasses(String directory) throws ClassNotFoundException { + List> classes = new ArrayList>(); + try { + ZipInputStream zip = new ZipInputStream(new FileInputStream(directory)); + for (ZipEntry entry = zip.getNextEntry(); entry != null; entry = zip.getNextEntry()) { + String name = entry.getName().toString(); + if (name.endsWith(".class") && !name.contains("$") && !name.contains("/") + && !name.equals("TestsSuite.class") && !name.equals("OpenVinoTestRunner.class") && !name.equals("IETest.class")) { + classes.add(Class.forName(name.substring(0, name.length() - ".class".length()))); + } + } + } catch(FileNotFoundException e){ + System.out.println("FileNotFoundException: " + e.getMessage()); + } catch(IOException e){ + System.out.println("IOException: " + e.getMessage()); + } + return classes; + } +}