useNNAPI(interpreterHandle, useNNAPI);
}
+ /** Sets the recommended number of threads on the underlying native interpreter. */
+ void setNumThreads(int numRecommendedThreads) {
+ numThreads(interpreterHandle, numRecommendedThreads);
+ }
+
/** Gets index of an input given its name. */
int getInputIndex(String name) {
if (inputsIndexes == null) {
private static native void useNNAPI(long interpreterHandle, boolean state);
+ // Applies the recommended thread count to the native interpreter identified by the handle.
+ private static native void numThreads(long interpreterHandle, int numRecommendedThreads);
+
private static native long createErrorReporter(int size);
private static native long createModel(String modelPathOrBuffer, long errorHandle);
interpreter->UseNNAPI(static_cast<bool>(state));
}
+// Forwards a recommended thread count from Java to the native interpreter.
+// A null/invalid handle is reported by convertLongToInterpreter and ignored here.
+JNIEXPORT void JNICALL
+Java_org_tensorflow_lite_NativeInterpreterWrapper_numThreads(JNIEnv* env,
+                                                             jclass clazz,
+                                                             jlong handle,
+                                                             jint num_threads) {
+  tflite::Interpreter* interp = convertLongToInterpreter(env, handle);
+  if (interp == nullptr) {
+    return;
+  }
+  interp->SetNumThreads(static_cast<int>(num_threads));
+}
+
JNIEXPORT jlong JNICALL
Java_org_tensorflow_lite_NativeInterpreterWrapper_createErrorReporter(
JNIEnv* env, jclass clazz, jint size) {
/*
 * Class: org_tensorflow_lite_NativeInterpreterWrapper
 * Method:
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL
+Java_org_tensorflow_lite_NativeInterpreterWrapper_numThreads(JNIEnv* env,
+ jclass clazz,
+ jlong handle,
+ jint num_threads);
+
+/*
+ * Class: org_tensorflow_lite_NativeInterpreterWrapper
+ * Method:
* Signature: (I)J
*/
JNIEXPORT jlong JNICALL
}
/**
+ * Sets the number of threads for an {@code Interpreter}.
+ *
+ * @param interpreter an instance of {@code Interpreter}. If it is not initialized, an {@code
+ * IllegalArgumentException} will be thrown.
+ * @param numRecommendedThreads an integer value indicating the number of recommended threads.
+ */
+ public static void setNumThreads(Interpreter interpreter, int numRecommendedThreads) {
+ if (interpreter != null && interpreter.wrapper != null) {
+ interpreter.wrapper.setNumThreads(numRecommendedThreads);
+ } else {
+ // Message must name this method, not the NNAPI setter it was copied from.
+ throw new IllegalArgumentException("Interpreter has not initialized; Failed to setNumThreads.");
+ }
+ }
+ /**
* Gets the last inference duration in nanoseconds. It returns null if there is no previous
* inference run or the last inference run failed.
*