if: failure()
run: |
ngraph/maint/apply-code-format.sh
- git diff >code_style_diff.patch
+ git diff >ngraph_code_style_diff.patch
- uses: actions/upload-artifact@v2
if: failure()
with:
- name: code_style_diff
- path: code_style_diff.patch
+ name: ngraph_code_style_diff
+ path: ngraph_code_style_diff.patch
+
+ Java:
+ runs-on: ubuntu-18.04
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions/setup-java@v1
+ with:
+ java-version: '11'
+
+ - name: Install dependencies
+ run: |
+ wget -nc https://github.com/google/google-java-format/releases/download/google-java-format-1.9/google-java-format-1.9-all-deps.jar
+
+ - name: Check code style
+ run: |
+ java -jar google-java-format-1.9-all-deps.jar --set-exit-if-changed -a -i $(find . -type f -name "*.java")
+
+ - name: Create code style diff
+ if: failure()
+ run: |
+ git diff >java_code_style_diff.patch
+
+ - uses: actions/upload-artifact@v2
+ if: failure()
+ with:
+ name: java_code_style_diff
+ path: java_code_style_diff.patch
}
public Blob(TensorDesc tensorDesc, byte[] data) {
- super(BlobByte(tensorDesc.getNativeObjAddr(), data)) ;
+ super(BlobByte(tensorDesc.getNativeObjAddr(), data));
}
public Blob(TensorDesc tensorDesc, float[] data) {
super(BlobCArray(tensorDesc.nativeObj, cArray));
}
- public TensorDesc getTensorDesc(){
+ public TensorDesc getTensorDesc() {
return new TensorDesc(GetTensorDesc(nativeObj));
}
super(addr);
}
- public String getName(){
+ public String getName() {
return getName(nativeObj);
}
- public int getBatchSize(){
+ public int getBatchSize() {
return getBatchSize(nativeObj);
}
return GetOutputsInfo(nativeObj);
}
- public Map<String, InputInfo> getInputsInfo(){
+ public Map<String, InputInfo> getInputsInfo() {
return GetInputsInfo(nativeObj);
}
reshape(nativeObj, inputShapes);
}
- public Map<String, int[]> getInputShapes(){
+ public Map<String, int[]> getInputShapes() {
return getInputShapes(nativeObj);
}
private static native int getBatchSize(long addr);
private static native Map<String, InputInfo> GetInputsInfo(long addr);
-
+
private static native Map<String, Data> GetOutputsInfo(long addr);
private static native void reshape(long addr, Map<String, int[]> inputShapes);
package org.intel.openvino;
-public class Data extends IEWrapper{
+public class Data extends IEWrapper {
protected Data(long addr) {
super(addr);
return new ExecutableNetwork(LoadNetwork(nativeObj, net.getNativeObjAddr(), device));
}
- public ExecutableNetwork LoadNetwork(CNNNetwork net, final String device, final Map<String, String> config) {
- return new ExecutableNetwork(LoadNetwork1(nativeObj, net.getNativeObjAddr(), device, config));
+ public ExecutableNetwork LoadNetwork(
+ CNNNetwork net, final String device, final Map<String, String> config) {
+ long network = LoadNetwork1(nativeObj, net.getNativeObjAddr(), device, config);
+ return new ExecutableNetwork(network);
}
public void RegisterPlugin(String pluginName, String deviceName) {
/*----------------------------------- native methods -----------------------------------*/
private static native long ReadNetwork(long core, final String modelFileName);
- private static native long ReadNetwork1(long core, final String modelPath, final String weightPath);
+ private static native long ReadNetwork1(
+ long core, final String modelPath, final String weightPath);
private static native long LoadNetwork(long core, long net, final String device);
- private static native long LoadNetwork1(long core, long net, final String device, final Map<String, String> config);
+ private static native long LoadNetwork1(
+ long core, long net, final String device, final Map<String, String> config);
private static native void RegisterPlugin(long core, String pluginName, String deviceName);
public class IEWrapper {
protected final long nativeObj;
- protected IEWrapper(long addr){
+ protected IEWrapper(long addr) {
nativeObj = addr;
}
return StatusCode.valueOf(Wait(nativeObj, waitMode.getValue()));
}
- public void SetCompletionCallback(Runnable runnable){
+ public void SetCompletionCallback(Runnable runnable) {
SetCompletionCallback(nativeObj, runnable);
}
public Map<String, InferenceEngineProfileInfo> GetPerformanceCounts() {
return GetPerformanceCounts(nativeObj);
- }
+ }
/*----------------------------------- native methods -----------------------------------*/
private static native void Infer(long addr);
public class InferenceEngineProfileInfo {
public enum LayerStatus {
- NOT_RUN(0),
- OPTIMIZED_OUT(1),
+ NOT_RUN(0),
+ OPTIMIZED_OUT(1),
EXECUTED(2);
-
+
private int value;
private static Map<Integer, LayerStatus> map = new HashMap<Integer, LayerStatus>();
-
+
static {
for (LayerStatus layerStatus : LayerStatus.values()) {
map.put(layerStatus.value, layerStatus);
}
}
-
+
LayerStatus(int value) {
this.value = value;
}
-
+
int getValue() {
return value;
}
-
+
static LayerStatus valueOf(int value) {
return map.get(value);
}
public String layerType;
public int executionIndex;
- public InferenceEngineProfileInfo(LayerStatus status, long realTimeUSec, long cpuUSec, String execType, String layerType, int executionIndex) {
+ public InferenceEngineProfileInfo(
+ LayerStatus status,
+ long realTimeUSec,
+ long cpuUSec,
+ String execType,
+ String layerType,
+ int executionIndex) {
this.status = status;
this.realTimeUSec = realTimeUSec;
this.cpuUSec = cpuUSec;
package org.intel.openvino;
-public class InputInfo extends IEWrapper{
+public class InputInfo extends IEWrapper {
public InputInfo(long addr) {
super(addr);
SetLayout(nativeObj, layout.getValue());
}
- public Layout getLayout(){
+ public Layout getLayout() {
return Layout.valueOf(getLayout(nativeObj));
}
SetPrecision(nativeObj, precision.getValue());
}
- public Precision getPrecision(){
+ public Precision getPrecision() {
return Precision.valueOf(getPrecision(nativeObj));
}
- public TensorDesc getTensorDesc(){
+ public TensorDesc getTensorDesc() {
return new TensorDesc(GetTensorDesc(nativeObj));
}
@Override
protected native void delete(long nativeObj);
-}
\ No newline at end of file
+}
package org.intel.openvino;
-public class PreProcessInfo extends IEWrapper{
+public class PreProcessInfo extends IEWrapper {
public PreProcessInfo(long addr) {
super(addr);
public enum Precision {
UNSPECIFIED(255),
- MIXED(0),
- FP32(10),
- FP16(11),
- Q78(20),
- I16(30),
- U8(40),
- I8(50),
- U16(60),
- I32(70),
- I64(72),
- BIN(71),
+ MIXED(0),
+ FP32(10),
+ FP16(11),
+ Q78(20),
+ I16(30),
+ U8(40),
+ I8(50),
+ U16(60),
+ I32(70),
+ I64(72),
+ BIN(71),
CUSTOM(80);
private int value;
return map.get(value);
}
}
-
\ No newline at end of file
package org.intel.openvino;
public enum ResizeAlgorithm {
- NO_RESIZE(0), RESIZE_BILINEAR(1), RESIZE_AREA(2);
+ NO_RESIZE(0),
+ RESIZE_BILINEAR(1),
+ RESIZE_AREA(2);
private int value;
package org.intel.openvino;
-import java.util.Map;
import java.util.HashMap;
+import java.util.Map;
public enum StatusCode {
- OK(0), GENERAL_ERROR(-1), NOT_IMPLEMENTED(-2), NETWORK_NOT_LOADED(-3),
- PARAMETER_MISMATCH(-4), NOT_FOUND(-5), OUT_OF_BOUNDS(-6), UNEXPECTED(-7),
- REQUEST_BUSY(-8), RESULT_NOT_READY(-9), NOT_ALLOCATED(-10), INFER_NOT_STARTED(-11),
+ OK(0),
+ GENERAL_ERROR(-1),
+ NOT_IMPLEMENTED(-2),
+ NETWORK_NOT_LOADED(-3),
+ PARAMETER_MISMATCH(-4),
+ NOT_FOUND(-5),
+ OUT_OF_BOUNDS(-6),
+ UNEXPECTED(-7),
+ REQUEST_BUSY(-8),
+ RESULT_NOT_READY(-9),
+ NOT_ALLOCATED(-10),
+ INFER_NOT_STARTED(-11),
NETWORK_NOT_READ(-12);
private int value;
package org.intel.openvino;
-import java.util.concurrent.BlockingDeque;
-
public class TensorDesc extends IEWrapper {
- public TensorDesc(long addr){
- super(addr);
+ public TensorDesc(long addr) {
+ super(addr);
}
public TensorDesc(Precision precision, int[] dims, Layout layout) {
return GetDims(nativeObj);
}
- public Layout getLayout(){
+ public Layout getLayout() {
return Layout.valueOf(getLayout(nativeObj));
}
- public Precision getPrecision(){
+ public Precision getPrecision() {
return Precision.valueOf(getPrecision(nativeObj));
}
package org.intel.openvino;
public enum WaitMode {
- RESULT_READY(-1), STATUS_ONLY(0);
+ RESULT_READY(-1),
+ STATUS_ONLY(0);
private int value;
public int getValue() {
return value;
}
-}
\ No newline at end of file
+}
-import java.util.Map;
import java.util.HashMap;
+import java.util.Map;
public class ArgumentParser {
private Map<String, String> input;
}
public void parseArgs(String[] args) {
- try{
- for(int i = 0; i < args.length; i++) {
+ try {
+ for (int i = 0; i < args.length; i++) {
String arg = args[i];
if (arg.equals("--help") | arg.equals("-h")) {
printHelp();
}
}
}
- } catch(ArrayIndexOutOfBoundsException e) {
+ } catch (ArrayIndexOutOfBoundsException e) {
System.out.println("Error: Incorrect number of arguments");
System.exit(0);
}
## Build and run
-Build and run steps are similar to ```benchmark_app```, but you need to add OpenCV path.
-
-### Build
-Add an environment variable with OpenCV installation or build path:
+Build and run steps are similar to ```benchmark_app```, but you need to add an environment variable with OpenCV installation or build path before building:
```bash
export OpenCV_DIR=/path/to/opencv/
```
-import java.util.Map;
-
import org.intel.openvino.*;
+import java.util.Map;
+
public class InferReqWrap {
- public InferReqWrap(ExecutableNetwork net, int id, InferRequestsQueue irQueue) {
- request = net.CreateInferRequest();
+ public InferReqWrap(ExecutableNetwork net, int id, InferRequestsQueue irQueue) {
+ request = net.CreateInferRequest();
this.id = id;
this.irQueue = irQueue;
- request.SetCompletionCallback(new Runnable() {
-
- @Override
- public void run() {
- endTime = System.nanoTime();
- irQueue.putIdleRequest(id, getExecutionTimeInMilliseconds());
- }
- });
+ request.SetCompletionCallback(
+ new Runnable() {
+
+ @Override
+ public void run() {
+ endTime = System.nanoTime();
+ irQueue.putIdleRequest(id, getExecutionTimeInMilliseconds());
+ }
+ });
}
void startAsync() {
}
double getExecutionTimeInMilliseconds() {
- return (double)(endTime - startTime) * 1e-6;
+ return (double) (endTime - startTime) * 1e-6;
}
-
+
InferRequest request;
private InferRequestsQueue irQueue;
private long startTime;
+import org.intel.openvino.*;
+
import java.util.Vector;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
-import org.intel.openvino.*;
-
public class InferRequestsQueue {
public InferRequestsQueue(ExecutableNetwork net, int nireq) {
for (int id = 0; id < nireq; id++) {
}
resetTimes();
}
-
+
void resetTimes() {
startTime = Long.MAX_VALUE;
endTime = Long.MIN_VALUE;
latencies.clear();
}
-
+
double getDurationInMilliseconds() {
- return (double)(endTime - startTime) * 1e-6;
+ return (double) (endTime - startTime) * 1e-6;
}
-
+
void putIdleRequest(int id, double latency) {
latencies.add(latency);
idleIds.add(id);
foo.notify();
}
}
-
+
InferReqWrap getIdleRequest() {
try {
InferReqWrap request = requests.get(idleIds.take());
}
return null;
}
-
+
void waitAll() {
synchronized (foo) {
try {
- while(idleIds.size() != requests.size()) {
+ while (idleIds.size() != requests.size()) {
foo.wait();
}
} catch (InterruptedException e) {
}
}
}
-
+
Vector<Double> getLatencies() {
return latencies;
}
-
+
Vector<InferReqWrap> requests = new Vector<InferReqWrap>();
private BlockingQueue<Integer> idleIds = new LinkedBlockingQueue<Integer>();
private long startTime;
private long endTime;
- Vector<Double> latencies = new Vector<Double>();
+ Vector<Double> latencies = new Vector<Double>();
Object foo = new Object();
}
-import java.util.Map;
-import java.util.Vector;
-
-import javax.management.RuntimeErrorException;
+import org.intel.openvino.*;
-import java.util.Random;
-import java.util.HashMap;
-import java.util.LinkedList;
import java.util.ArrayList;
-
import java.util.Arrays;
-
-import org.intel.openvino.*;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Random;
+import java.util.Vector;
public class Main {
- static boolean adjustShapesBatch(Map<String, int[]> shapes, int batchSize, Map<String, InputInfo> inputInfo) {
+ static boolean adjustShapesBatch(
+ Map<String, int[]> shapes, int batchSize, Map<String, InputInfo> inputInfo) {
boolean updated = false;
for (Map.Entry<String, InputInfo> entry : inputInfo.entrySet()) {
Layout layout = entry.getValue().getTensorDesc().getLayout();
int batchIndex = -1;
- if ((layout == Layout.NCHW) || (layout == Layout.NCDHW) ||
- (layout == Layout.NHWC) || (layout == Layout.NDHWC) ||
- (layout == Layout.NC)) {
+ if ((layout == Layout.NCHW)
+ || (layout == Layout.NCDHW)
+ || (layout == Layout.NHWC)
+ || (layout == Layout.NDHWC)
+ || (layout == Layout.NC)) {
batchIndex = 0;
} else if (layout == Layout.CN) {
batchIndex = 1;
return updated;
}
- static String setThroughputStreams(IECore core, Map<String, String> device_config, String device, int nstreams, boolean isAsync) {
+ static String setThroughputStreams(
+ IECore core,
+ Map<String, String> device_config,
+ String device,
+ int nstreams,
+ boolean isAsync) {
String key = device + "_THROUGHPUT_STREAMS";
if (nstreams > 0) {
device_config.put(key, Integer.toString(nstreams));
} else if (!device_config.containsKey(key) && isAsync) {
- System.err.println("[ WARNING ] -nstreams default value is determined automatically for " + device + " device. " +
- "Although the automatic selection usually provides a reasonable performance," +
- "but it still may be non-optimal for some cases, for more information look at README.");
+ System.err.println(
+ "[ WARNING ] -nstreams default value is determined automatically for "
+ + device
+ + " device. Although the automatic selection usually provides a"
+ + " reasonable performance,but it still may be non-optimal for some"
+ + " cases, for more information look at README.");
device_config.put(key, device + "_THROUGHPUT_AUTO");
}
return device_config.get(key);
- };
+ }
static void fillBlobs(Vector<InferReqWrap> requests, Map<String, InputInfo> inputsInfo) {
for (Map.Entry<String, InputInfo> entry : inputsInfo.entrySet()) {
String inputName = entry.getKey();
TensorDesc tDesc = entry.getValue().getTensorDesc();
- System.err.print("[ INFO ] Network input '" + inputName + "' precision " + tDesc.getPrecision()
- + ", dimensions (" + tDesc.getLayout() + "): ");
- for (int dim : tDesc.getDims())
- System.err.print(dim + " ");
+ System.err.print(
+ "[ INFO ] Network input '"
+ + inputName
+ + "' precision "
+ + tDesc.getPrecision()
+ + ", dimensions ("
+ + tDesc.getLayout()
+ + "): ");
+
+ for (int dim : tDesc.getDims()) System.err.print(dim + " ");
System.err.println();
}
String inputName = entry.getKey();
TensorDesc tDesc = entry.getValue().getTensorDesc();
request.SetBlob(inputName, blobRandomByte(tDesc));
- }
+ }
}
}
int dims[] = tDesc.getDims();
int size = 1;
- for(int i = 0; i < dims.length; i++) {
+ for (int i = 0; i < dims.length; i++) {
size *= dims[i];
}
static double getMedianValue(Vector<Double> vec) {
Object[] objArr = vec.toArray();
- Double[] arr = Arrays.copyOf(objArr, objArr.length, Double[].class);
+ Double[] arr = Arrays.copyOf(objArr, objArr.length, Double[].class);
Arrays.sort(arr);
if (arr.length % 2 == 0)
- return ((double)arr[arr.length / 2] + (double)arr[arr.length / 2 - 1]) / 2;
- else
- return (double)arr[arr.length / 2];
+ return ((double) arr[arr.length / 2] + (double) arr[arr.length / 2 - 1]) / 2;
+ else return (double) arr[arr.length / 2];
}
static boolean getApiBoolean(String api) throws RuntimeException {
- if(api.equals("sync"))
- return false;
- else if(api.equals("async"))
- return true;
+ if (api.equals("sync")) return false;
+ else if (api.equals("async")) return true;
else throw new RuntimeException("Incorrect argument: '-api'");
}
static int step = 0;
- static void nextStep(String stepInfo) {
+
+ static void nextStep(String stepInfo) {
step += 1;
System.out.println("[Step " + step + "/11] " + stepInfo);
}
static int deviceDefaultDeviceDurationInSeconds(String device) {
- final Map<String, Integer> deviceDefaultDurationInSeconds = new HashMap<String, Integer>() {{
- put("CPU", 60 );
- put("GPU", 60 );
- put("VPU", 60 );
- put("MYRIAD", 60 );
- put("HDDL", 60 );
- put("FPGA", 120);
- put("UNKNOWN", 120);
- }};
+ final Map<String, Integer> deviceDefaultDurationInSeconds =
+ new HashMap<String, Integer>() {
+ {
+ put("CPU", 60);
+ put("GPU", 60);
+ put("VPU", 60);
+ put("MYRIAD", 60);
+ put("HDDL", 60);
+ put("FPGA", 120);
+ put("UNKNOWN", 120);
+ }
+ };
Integer duration = deviceDefaultDurationInSeconds.get(device);
if (duration == null) {
duration = deviceDefaultDurationInSeconds.get("UNKNOWN");
- System.err.println("[ WARNING ] Default duration " + duration + " seconds for unknown device '" + device + "' is used");
+ System.err.println(
+ "[ WARNING ] Default duration "
+ + duration
+ + " seconds for unknown device '"
+ + device
+ + "' is used");
}
return duration;
}
static long getTotalMsTime(long startTimeMilliSec) {
return (System.currentTimeMillis() - startTimeMilliSec);
- };
+ }
static long getDurationInMilliseconds(int seconds) {
return seconds * 1000L;
System.exit(1);
}
- // ----------------- 1. Parsing and validating input arguments ---------------------------------------------
+ // ----------------- 1. Parsing and validating input arguments -----------------
nextStep("Parsing and validating input arguments");
ArgumentParser parser = new ArgumentParser("This is benchmarking application");
int batchSize = parser.getInteger("-b", 0);
int nthreads = parser.getInteger("-nthreads", 0);
int nstreams = parser.getInteger("-nstreams", 0);
- int timeLimit = parser.getInteger("-t",0);
+ int timeLimit = parser.getInteger("-t", 0);
String api = parser.get("-api", "async");
boolean isAsync;
- try{
+ try {
isAsync = getApiBoolean(api);
- } catch(RuntimeException e) {
+ } catch (RuntimeException e) {
System.out.println(e.getMessage());
return;
}
- if(xmlPath == null) {
+ if (xmlPath == null) {
System.out.println("Error: Missed argument: -m");
return;
}
- // ----------------- 2. Loading the Inference Engine --------------------------------------------------------
+ // ----------------- 2. Loading the Inference Engine --------------------------
nextStep("Loading the Inference Engine");
IECore core = new IECore();
- // ----------------- 3. Setting device configuration --------------------------------------------------------
+ // ----------------- 3. Setting device configuration --------------------------
nextStep("Setting device configuration");
Map<String, String> device_config = new HashMap<>();
- if (device.equals("CPU")) { // CPU supports few special performance-oriented keys
+ if (device.equals("CPU")) { // CPU supports few special performance-oriented keys
// limit threading for CPU portion of inference
- if (nthreads > 0)
- device_config.put("CPU_THREADS_NUM", Integer.toString(nthreads));
+ if (nthreads > 0) device_config.put("CPU_THREADS_NUM", Integer.toString(nthreads));
if (!device_config.containsKey("CPU_BIND_THREAD")) {
- device_config.put("CPU_BIND_THREAD", "YES");
+ device_config.put("CPU_BIND_THREAD", "YES");
}
// for CPU execution, more throughput-oriented execution via streams
} else if (device.equals("MYRIAD")) {
device_config.put("LOG_LEVEL", "LOG_WARNING");
} else if (device.equals("GNA")) {
- device_config.put("GNA_PRECISION", "I16");
+ device_config.put("GNA_PRECISION", "I16");
- if (nthreads > 0)
- device_config.put("GNA_LIB_N_THREADS", Integer.toString(nthreads));
+ if (nthreads > 0) device_config.put("GNA_LIB_N_THREADS", Integer.toString(nthreads));
}
core.SetConfig(device_config, device);
- // ----------------- 4. Reading the Intermediate Representation network -------------------------------------
+ // ----------- 4. Reading the Intermediate Representation network -------------
nextStep("Reading the Intermediate Representation network");
long startTime = System.currentTimeMillis();
String inputName = new ArrayList<String>(inputsInfo.keySet()).get(0);
InputInfo inputInfo = inputsInfo.get(inputName);
- // ----------------- 5. Resizing network to match image sizes and given batch -------------------------------
+ // ----- 5. Resizing network to match image sizes and given batch --------------
nextStep("Resizing network to match image sizes and given batch");
int inputBatchSize = batchSize;
batchSize = net.getBatchSize();
Map<String, int[]> shapes = net.getInputShapes();
-
+
if ((inputBatchSize != 0) && (batchSize != inputBatchSize)) {
adjustShapesBatch(shapes, batchSize, inputsInfo);
System.err.println("[ INFO ] Reshape network took " + durationMs + " ms");
}
- System.err.println((inputBatchSize != 0 ? "[ INFO ] Network batch size was changed to: " : "[ INFO ] Network batch size: ") + batchSize);
+ System.err.println(
+ (inputBatchSize != 0
+ ? "[ INFO ] Network batch size was changed to: "
+ : "[ INFO ] Network batch size: ")
+ + batchSize);
- // ----------------- 6. Configuring input -------------------------------------------------------------------
+ // ----------------- 6. Configuring input -------------------------------------
nextStep("Configuring input");
inputInfo.getPreProcess().setResizeAlgorithm(ResizeAlgorithm.RESIZE_BILINEAR);
inputInfo.setPrecision(Precision.U8);
- // ----------------- 7. Loading the model to the device -----------------------------------------------------
+ // ----------------- 7. Loading the model to the device -----------------------
nextStep("Loading the model to the device");
startTime = System.currentTimeMillis();
System.err.println("[ INFO ] Load network took " + durationMs + " ms");
- // ----------------- 8. Setting optimal runtime parameters --------------------------------------------------
+ // ---------------- 8. Setting optimal runtime parameters ---------------------
nextStep("Setting optimal runtime parameters");
// Update number of streams
- nstreams = Integer.parseInt(core.GetConfig(device, device + "_THROUGHPUT_STREAMS").asString());
+ String nStr = core.GetConfig(device, device + "_THROUGHPUT_STREAMS").asString();
+ nstreams = Integer.parseInt(nStr);
// Number of requests
if (nireq == 0) {
int temp = niter;
niter = ((niter + nireq - 1) / nireq) * nireq;
if (temp != niter) {
- System.err.println("[ INFO ] Number of iterations was aligned by request number from " +
- temp + " to " + niter + " using number of requests " + nireq);
+ System.err.println(
+ "[ INFO ] Number of iterations was aligned by request number from "
+ + temp
+ + " to "
+ + niter
+ + " using number of requests "
+ + nireq);
}
}
durationSeconds = deviceDefaultDeviceDurationInSeconds(device);
}
durationMs = getDurationInMilliseconds(durationSeconds);
-
- // ----------------- 9. Creating infer requests and filling input blobs -------------------------------------
+
+ // ---------- 9. Creating infer requests and filling input blobs ---------------
nextStep("Creating infer requests and filling input blobs");
InferRequestsQueue inferRequestsQueue = new InferRequestsQueue(executableNetwork, nireq);
fillBlobs(inferRequestsQueue.requests, inputsInfo);
- // ----------------- 10. Measuring performance --------------------------------------------------------------
+ // ---------- 10. Measuring performance ----------------------------------------
String ss = "Start inference " + api + "ronously";
if (isAsync) {
if (!ss.isEmpty()) {
startTime = System.currentTimeMillis();
long execTime = getTotalMsTime(startTime);
-
- while ((niter != 0 && iteration < niter) ||
- (durationMs != 0L && execTime < durationMs) ||
- (isAsync && iteration % nireq != 0)) {
+
+ while ((niter != 0 && iteration < niter)
+ || (durationMs != 0L && execTime < durationMs)
+ || (isAsync && iteration % nireq != 0)) {
inferRequest = inferRequestsQueue.getIdleRequest();
-
+
if (isAsync) {
- // As the inference request is currently idle, the wait() adds no additional overhead
- //(and should return immediately).
+ // As the inference request is currently idle, the wait() adds no additional
+ // overhead (and should return immediately).
// The primary reason for calling the method is exception checking/re-throwing.
// Callback, that governs the actual execution can handle errors as well,
- // but as it uses just error codes it has no details like ‘what()’ method of `std::exception`
+ // but as it uses just error codes it has no details like ‘what()’ method of
+ // `std::exception`.
// So, rechecking for any exceptions here.
inferRequest._wait();
inferRequest.startAsync();
double latency = getMedianValue(inferRequestsQueue.getLatencies());
double totalDuration = inferRequestsQueue.getDurationInMilliseconds();
- double fps = (!isAsync) ? batchSize * 1000.0 / latency :
- batchSize * 1000.0 * iteration / totalDuration;
+ double fps =
+ (!isAsync)
+ ? batchSize * 1000.0 / latency
+ : batchSize * 1000.0 * iteration / totalDuration;
- // ----------------- 11. Dumping statistics report ----------------------------------------------------------
+ // ------------ 11. Dumping statistics report ----------------------------------
nextStep("Dumping statistics report");
System.out.println("Count: " + iteration + " iterations");
+import org.intel.openvino.*;
import org.opencv.core.*;
-import org.opencv.imgcodecs.*;
import org.opencv.highgui.HighGui;
+import org.opencv.imgcodecs.*;
import org.opencv.imgproc.Imgproc;
-import org.intel.openvino.*;
-import java.util.Map;
-import java.util.Set;
import java.util.ArrayList;
+import java.util.Map;
/*
This is face detection java sample.
-Upon the start-up the sample application reads command line parameters and loads a network
-and an image to the Inference Engine device. When inference is done, the application will show
-the image with detected objects enclosed in rectangles in new window.It also outputs the
-confidence value and the coordinates of the rectangle to the standard output stream.
+Upon the start-up the sample application reads command line parameters and loads a network
+and an image to the Inference Engine device. When inference is done, the application will show
+the image with detected objects enclosed in rectangles in new window. It also outputs the
+confidence value and the coordinates of the rectangle to the standard output stream.
To get the list of command line parameters run the application with `--help` paramether.
*/
String imgPath = parser.get("-i", null);
String xmlPath = parser.get("-m", null);
- if(imgPath == null) {
+ if (imgPath == null) {
System.out.println("Error: Missed argument: -i");
return;
}
- if(xmlPath == null) {
+ if (xmlPath == null) {
System.out.println("Error: Missed argument: -m");
return;
}
Mat image = Imgcodecs.imread(imgPath);
-
+
int[] dimsArr = {1, image.channels(), image.height(), image.width()};
TensorDesc tDesc = new TensorDesc(Precision.U8, dimsArr, Layout.NHWC);
- // The source image is also used at the end of the program to display the detection results,
- // therefore the Mat object won't be destroyed by Garbage Collector while the network is running.
+ // The source image is also used at the end of the program to display the detection results,
+ // therefore the Mat object won't be destroyed by Garbage Collector while the network is
+ // running.
Blob imgBlob = new Blob(tDesc, image.dataAddr());
-
+
IECore core = new IECore();
CNNNetwork net = core.ReadNetwork(xmlPath);
ExecutableNetwork executableNetwork = core.LoadNetwork(net, "CPU");
InferRequest inferRequest = executableNetwork.CreateInferRequest();
- inferRequest.SetBlob(inputName, imgBlob);
+ inferRequest.SetBlob(inputName, imgBlob);
inferRequest.Infer();
Blob output = inferRequest.GetBlob(outputName);
for (int curProposal = 0; curProposal < maxProposalCount; curProposal++) {
int image_id = (int) detection[curProposal * 7];
- if (image_id < 0)
- break;
+ if (image_id < 0) break;
float confidence = detection[curProposal * 7 + 2];
// Drawing only objects with >70% probability
- if (confidence < THRESHOLD)
- continue;
-
+ if (confidence < THRESHOLD) continue;
+
int label = (int) (detection[curProposal * 7 + 1]);
int xmin = (int) (detection[curProposal * 7 + 3] * image.cols());
int ymin = (int) (detection[curProposal * 7 + 4] * image.rows());
int xmax = (int) (detection[curProposal * 7 + 5] * image.cols());
int ymax = (int) (detection[curProposal * 7 + 6] * image.rows());
- System.out.println("[" + curProposal + "," + label + "] element, prob = " + confidence + " (" + xmin
- + "," + ymin + ")-(" + xmax + "," + ymax + ")");
+ String result = "[" + curProposal + "," + label + "] element, prob = " + confidence;
+ result += " (" + xmin + "," + ymin + ")-(" + xmax + "," + ymax + ")";
+
+ System.out.println(result);
System.out.println(" - WILL BE PRINTED!");
// Draw rectangle around detected object.
- Imgproc.rectangle(image, new Point(xmin, ymin), new Point(xmax, ymax), new Scalar(0, 255, 0));
+ Imgproc.rectangle(
+ image, new Point(xmin, ymin), new Point(xmax, ymax), new Scalar(0, 255, 0));
}
HighGui.namedWindow("Detection", HighGui.WINDOW_AUTOSIZE);
+import org.intel.openvino.*;
import org.opencv.core.*;
+import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.*;
-import org.opencv.videoio.*;
import org.opencv.imgproc.Imgproc;
-import org.opencv.highgui.HighGui;
+import org.opencv.videoio.*;
+import java.util.ArrayList;
import java.util.LinkedList;
+import java.util.Map;
+import java.util.Queue;
import java.util.Vector;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
-import java.util.Map;
-import java.util.Queue;
-import java.util.ArrayList;
-import java.util.HashMap;
-
-import org.intel.openvino.*;
/*
This is async face detection java sample.
-Upon the start-up the sample application reads command line parameters and loads a network
-and an images to the Inference Engine device. When inference is done, the application
+Upon the start-up the sample application reads command line parameters and loads a network
+and images to the Inference Engine device. When inference is done, the application
shows the video with detected objects enclosed in rectangles in new window.
To get the list of command line parameters run the application with `--help` paramether.
*/
public class Main {
-
+
public static Blob imageToBlob(Mat image) {
int[] dimsArr = {1, image.channels(), image.height(), image.width()};
TensorDesc tDesc = new TensorDesc(Precision.U8, dimsArr, Layout.NHWC);
while (!startedRequestsIds.isEmpty()) {
int requestId = startedRequestsIds.peek();
InferRequest inferRequest = inferRequests.get(requestId);
-
- if (inferRequest.Wait(wait) != StatusCode.OK)
- return;
+
+ if (inferRequest.Wait(wait) != StatusCode.OK) return;
if (size == 0 && res == null) {
size = inferRequest.GetBlob(outputName).size();
String device = parser.get("-d", "CPU");
int inferRequestsSize = parser.getInteger("-nireq", 2);
- if(imgsPath == null ) {
+ if (imgsPath == null) {
System.out.println("Error: Missed argument: -i");
return;
}
- if(xmlPath == null) {
+ if (xmlPath == null) {
System.out.println("Error: Missed argument: -m");
return;
}
BlockingQueue<Mat> framesQueue = new LinkedBlockingQueue<Mat>();
- Thread captureThread = new Thread(new Runnable() {
- @Override
- public void run() {
- VideoCapture cam = new VideoCapture();
- cam.open(imgsPath);
- Mat frame = new Mat();
-
- while (cam.read(frame)) {
- framesCounter++;
- framesQueue.add(frame.clone());
- }
- }
- });
-
- Thread inferThread = new Thread(new Runnable() {
-
- @Override
- public void run() {
- try {
- IECore core = new IECore();
- CNNNetwork net = core.ReadNetwork(xmlPath);
-
- Map<String, InputInfo> inputsInfo = net.getInputsInfo();
- String inputName = new ArrayList<String>(inputsInfo.keySet()).get(0);
- InputInfo inputInfo = inputsInfo.get(inputName);
-
- inputInfo.getPreProcess().setResizeAlgorithm(ResizeAlgorithm.RESIZE_BILINEAR);
- inputInfo.setLayout(Layout.NHWC);
- inputInfo.setPrecision(Precision.U8);
-
- outputName = new ArrayList<String>(net.getOutputsInfo().keySet()).get(0);
-
- ExecutableNetwork executableNetwork = core.LoadNetwork(net, device);
-
- asyncInferIsFree = new Vector<Boolean>(inferRequestsSize);
-
- for (int i = 0; i < inferRequestsSize; i++) {
- inferRequests.add(executableNetwork.CreateInferRequest());
- asyncInferIsFree.add(true);
+ Runnable capture =
+ new Runnable() {
+ @Override
+ public void run() {
+ VideoCapture cam = new VideoCapture();
+ cam.open(imgsPath);
+ Mat frame = new Mat();
+
+ while (cam.read(frame)) {
+ framesCounter++;
+ framesQueue.add(frame.clone());
+ }
}
-
- boolean isRunning = true;
+ };
+ Thread captureThread = new Thread(capture);
- while (captureThread.isAlive() || !framesQueue.isEmpty()) {
- processInferRequets(WaitMode.STATUS_ONLY);
+ Runnable infer =
+ new Runnable() {
+ @Override
+ public void run() {
+ try {
+ IECore core = new IECore();
+ CNNNetwork net = core.ReadNetwork(xmlPath);
- for (int i = 0; i < inferRequestsSize; i++) {
- if (!asyncInferIsFree.get(i))
- continue;
+ Map<String, InputInfo> inputsInfo = net.getInputsInfo();
+ String inputName = new ArrayList<String>(inputsInfo.keySet()).get(0);
+ InputInfo inputInfo = inputsInfo.get(inputName);
- Mat frame = framesQueue.poll(0, TimeUnit.SECONDS);
+ inputInfo
+ .getPreProcess()
+ .setResizeAlgorithm(ResizeAlgorithm.RESIZE_BILINEAR);
+ inputInfo.setLayout(Layout.NHWC);
+ inputInfo.setPrecision(Precision.U8);
- if (frame == null)
- break;
+ outputName =
+ new ArrayList<String>(net.getOutputsInfo().keySet()).get(0);
- InferRequest request = inferRequests.get(i);
-
- asyncInferIsFree.setElementAt(false, i);
- processedFramesQueue.add(frame); // predictionsQueue is used in rendering
+ ExecutableNetwork execNetwork = core.LoadNetwork(net, device);
- // The source frame is kept in processedFramesQueue,
- // so the frame will be removed by java Garbage Collector only after completion of inference,
- // and we can create Blob object using Mat object data address.
- Blob imgBlob = imageToBlob(frame);
- request.SetBlob(inputName, imgBlob);
+ asyncInferIsFree = new Vector<Boolean>(inferRequestsSize);
- startedRequestsIds.add(i);
- request.StartAsync();
+ for (int i = 0; i < inferRequestsSize; i++) {
+ inferRequests.add(execNetwork.CreateInferRequest());
+ asyncInferIsFree.add(true);
+ }
+
+ boolean isRunning = true;
+
+ while (captureThread.isAlive() || !framesQueue.isEmpty()) {
+ processInferRequets(WaitMode.STATUS_ONLY);
+
+ for (int i = 0; i < inferRequestsSize; i++) {
+ if (!asyncInferIsFree.get(i)) continue;
+
+ Mat frame = framesQueue.poll(0, TimeUnit.SECONDS);
+
+ if (frame == null) break;
+
+ InferRequest request = inferRequests.get(i);
+
+ asyncInferIsFree.setElementAt(false, i);
+
+ // processedFramesQueue is used in rendering
+ processedFramesQueue.add(frame);
+
+ // The source frame is kept in processedFramesQueue,
+ // so the frame will be removed by java Garbage
+ // Collector only after completion of inference,
+ // and we can create Blob object using Mat object data address.
+ Blob imgBlob = imageToBlob(frame);
+ request.SetBlob(inputName, imgBlob);
+
+ startedRequestsIds.add(i);
+ request.StartAsync();
+ }
+ }
+ processInferRequets(WaitMode.RESULT_READY);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+
+ for (Thread t : Thread.getAllStackTraces().keySet())
+ if (t.getState() == Thread.State.RUNNABLE) t.interrupt();
}
}
- processInferRequets(WaitMode.RESULT_READY);
- } catch (InterruptedException e) {
- e.printStackTrace();
-
- for (Thread t : Thread.getAllStackTraces().keySet())
- if (t.getState()==Thread.State.RUNNABLE)
- t.interrupt();
- }
- }
- });
+ };
+ Thread inferThread = new Thread(infer);
captureThread.start();
inferThread.start();
- TickMeter tm = new TickMeter();
+ TickMeter tm = new TickMeter();
+ Scalar color = new Scalar(0, 255, 0);
try {
while (inferThread.isAlive() || !detectionOutput.isEmpty()) {
- float[] detection = detectionOutput.poll(waitingTime, TimeUnit.SECONDS);
- if (detection == null)
- continue;
-
- Mat img = processedFramesQueue.poll(waitingTime, TimeUnit.SECONDS);
+ float[] detection = detectionOutput.poll(waitingTime, TimeUnit.SECONDS);
+ if (detection == null) continue;
+
+ Mat img = processedFramesQueue.poll(waitingTime, TimeUnit.SECONDS);
int maxProposalCount = detection.length / 7;
for (int curProposal = 0; curProposal < maxProposalCount; curProposal++) {
int imageId = (int) detection[curProposal * 7];
- if (imageId < 0)
- break;
-
+ if (imageId < 0) break;
+
float confidence = detection[curProposal * 7 + 2];
// Drawing only objects with >70% probability
- if (confidence < CONFIDENCE_THRESHOLD)
- continue;
-
+ if (confidence < CONFIDENCE_THRESHOLD) continue;
+
int label = (int) (detection[curProposal * 7 + 1]);
int xmin = (int) (detection[curProposal * 7 + 3] * img.cols());
int ymin = (int) (detection[curProposal * 7 + 4] * img.rows());
int xmax = (int) (detection[curProposal * 7 + 5] * img.cols());
int ymax = (int) (detection[curProposal * 7 + 6] * img.rows());
-
+
// Draw rectangle around detected object.
- Imgproc.rectangle(img, new Point(xmin, ymin), new Point(xmax, ymax), new Scalar(0, 255, 0), 2);
+ Point lt = new Point(xmin, ymin);
+ Point br = new Point(xmax, ymax);
+ Imgproc.rectangle(img, lt, br, color, 2);
}
- if (resultCounter == warmupNum) {
+ if (resultCounter == warmupNum) {
tm.start();
} else if (resultCounter > warmupNum) {
tm.stop();
- double worksFps = ((double)(resultCounter - warmupNum)) / tm.getTimeSec();
- double readFps = ((double)(framesCounter - warmupNum)) / tm.getTimeSec();
+ double worksFps = ((double) (resultCounter - warmupNum)) / tm.getTimeSec();
+ double readFps = ((double) (framesCounter - warmupNum)) / tm.getTimeSec();
tm.start();
- Imgproc.putText(img, "Reading fps: " + String.format("%.3f", readFps), new Point(10, 50), 0 , 0.7, new Scalar(0, 255, 0), 1);
- Imgproc.putText(img, "Inference fps: " + String.format("%.3f", worksFps), new Point(10, 80), 0 , 0.7, new Scalar(0, 255, 0), 1);
+ String label = "Reading fps: " + String.format("%.3f", readFps);
+ String label1 = "Inference fps: " + String.format("%.3f", worksFps);
+
+ Imgproc.putText(img, label, new Point(10, 50), 0, 0.7, color, 1);
+ Imgproc.putText(img, label1, new Point(10, 80), 0, 0.7, color, 1);
}
-
HighGui.imshow("Detection", img);
}
-
+
captureThread.join();
inferThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
for (Thread t : Thread.getAllStackTraces().keySet())
- if (t.getState()==Thread.State.RUNNABLE)
- t.interrupt();
+ if (t.getState() == Thread.State.RUNNABLE) t.interrupt();
}
}
+import org.intel.openvino.*;
import org.junit.Assert;
import org.junit.Test;
-import org.intel.openvino.*;
-
public class BlobTests extends IETest {
-
+
@Test
public void testGetBlob() {
int[] dimsArr = {1, 3, 200, 200};
import static org.junit.Assert.*;
+
+import org.intel.openvino.*;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
-import org.intel.openvino.*;
-
public class CNNNetworkTests extends IETest {
IECore core = new IECore();
public void testAddOutput() {
CNNNetwork net = core.ReadNetwork(modelXml);
Map<String, Data> output = net.getOutputsInfo();
-
+
assertEquals("Input size", 1, output.size());
-
+
net.addOutput("19/WithoutBiases");
output = net.getOutputsInfo();
assertEquals("Input size", 2, output.size());
}
-
}
import static org.junit.Assert.*;
-import org.junit.Test;
import org.intel.openvino.*;
+import org.junit.Test;
-import java.util.Map;
import java.util.HashMap;
+import java.util.Map;
public class IECoreTests extends IETest {
IECore core = new IECore();
-
+
@Test
public void testReadNetwork() {
CNNNetwork net = core.ReadNetwork(modelXml, modelBin);
Map<String, String> testMap = new HashMap<String, String>();
- //When specifying key values as raw strings, omit the KEY_ prefix
+ // When specifying key values as raw strings, omit the KEY_ prefix
testMap.put("CPU_BIND_THREAD", "YES");
testMap.put("CPU_THREADS_NUM", "1");
} catch (Exception e) {
exceptionMessage = e.getMessage();
}
- assertTrue(exceptionMessage.contains("Device with \"DEVISE\" name is not registered in the InferenceEngine"));
+ assertTrue(
+ exceptionMessage.contains(
+ "Device with \"DEVISE\" name is not registered in the InferenceEngine"));
}
}
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Rule;
+import org.intel.openvino.*;
import org.junit.Ignore;
-import org.junit.runner.Description;
import org.junit.Rule;
import org.junit.rules.TestWatcher;
+import org.junit.runner.Description;
import java.nio.file.Paths;
-import org.intel.openvino.*;
-
@Ignore
public class IETest {
String modelXml;
System.err.println("Failed to load Inference Engine library\n" + e);
System.exit(1);
}
- modelXml = Paths.get(System.getenv("MODELS_PATH"), "models", "test_model", "test_model_fp32.xml").toString();
- modelBin = Paths.get(System.getenv("MODELS_PATH"), "models", "test_model", "test_model_fp32.bin").toString();
+ modelXml =
+ Paths.get(
+ System.getenv("MODELS_PATH"),
+ "models",
+ "test_model",
+ "test_model_fp32.xml")
+ .toString();
+ modelBin =
+ Paths.get(
+ System.getenv("MODELS_PATH"),
+ "models",
+ "test_model",
+ "test_model_fp32.bin")
+ .toString();
}
@Rule
- public TestWatcher watchman = new TestWatcher() {
- @Override
- protected void succeeded(Description description) {
- System.out.println(description + " - OK");
- }
+ public TestWatcher watchman =
+ new TestWatcher() {
+ @Override
+ protected void succeeded(Description description) {
+ System.out.println(description + " - OK");
+ }
- @Override
- protected void failed(Throwable e, Description description) {
- System.out.println(description + " - FAILED");
- }
- };
+ @Override
+ protected void failed(Throwable e, Description description) {
+ System.out.println(description + " - FAILED");
+ }
+ };
}
import static org.junit.Assert.*;
-import org.junit.Test;
+
+import org.intel.openvino.*;
import org.junit.Before;
+import org.junit.Test;
+import java.util.ArrayList;
import java.util.Map;
import java.util.Vector;
-import java.util.ArrayList;
-
-import org.intel.openvino.*;
-import org.intel.openvino.InferenceEngineProfileInfo.LayerStatus;
public class InferRequestTests extends IETest {
IECore core;
ArrayList<String> resKeySet = new ArrayList<String>(res.keySet());
for (int i = 0; i < res.size(); i++) {
- String key = resKeySet.get(i);
+ String key = resKeySet.get(i);
InferenceEngineProfileInfo resVal = res.get(key);
assertEquals(key + " execType", key, layer_name.elementAt(i));
assertEquals(key + " executionIndex", i, resVal.executionIndex);
- assertTrue(resVal.status == InferenceEngineProfileInfo.LayerStatus.EXECUTED
- || resVal.status == InferenceEngineProfileInfo.LayerStatus.NOT_RUN);
+ assertTrue(
+ resVal.status == InferenceEngineProfileInfo.LayerStatus.EXECUTED
+ || resVal.status == InferenceEngineProfileInfo.LayerStatus.NOT_RUN);
}
}
@Test
public void testSetCompletionCallback() {
- inferRequest.SetCompletionCallback(new Runnable() {
+ inferRequest.SetCompletionCallback(
+ new Runnable() {
- @Override
- public void run() {
- completionCallback = true;
- }
- });
+ @Override
+ public void run() {
+ completionCallback = true;
+ }
+ });
- for(int i = 0; i < 5; i++) {
- inferRequest.Wait(WaitMode.RESULT_READY);
+ for (int i = 0; i < 5; i++) {
+ inferRequest.Wait(WaitMode.RESULT_READY);
inferRequest.StartAsync();
- }
-
- inferRequest.Wait(WaitMode.RESULT_READY);
+ }
+
+ inferRequest.Wait(WaitMode.RESULT_READY);
inferRequest.StartAsync();
StatusCode statusCode = inferRequest.Wait(WaitMode.RESULT_READY);
import static org.junit.Assert.*;
+
+import org.intel.openvino.*;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Map;
-import org.intel.openvino.*;
-
public class InputInfoTests extends IETest {
IECore core = new IECore();
public void testSetPrecision() {
CNNNetwork net = core.ReadNetwork(modelXml);
Map<String, InputInfo> inputsInfo = net.getInputsInfo();
-
+
String inputName = new ArrayList<String>(inputsInfo.keySet()).get(0);
InputInfo inputInfo = inputsInfo.get(inputName);
inputInfo.setPrecision(Precision.U8);
assertEquals("setPrecision", Precision.U8, inputInfo.getPrecision());
}
-
}
+import org.intel.openvino.*;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;
IETest.device = parser.get("-d", "CPU");
Result result = JUnitCore.runClasses(TestsSuite.class);
-
+
for (Failure failure : result.getFailures()) {
- System.out.println(failure.toString());
+ System.out.println(failure.toString());
}
}
}
-import org.junit.runner.RunWith;
-import org.junit.runners.AllTests;
-
import junit.framework.TestSuite;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.zip.*;
-
-import java.nio.file.FileSystems;
-import java.nio.file.Path;
-import java.nio.file.Paths;
+import org.intel.openvino.*;
+import org.junit.runner.RunWith;
+import org.junit.runners.AllTests;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
-
-import java.lang.Class;
import java.net.*;
-
-import org.intel.openvino.*;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.zip.*;
@RunWith(AllTests.class)
-
-public class TestsSuite extends IETest{
+public class TestsSuite extends IETest {
public static TestSuite suite() {
TestSuite suite = new TestSuite();
try {
- //get openvino_test.jar path
- String dir = new File(TestsSuite.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getPath().toString();
-
+ // get openvino_test.jar path
+ String dir =
+ new File(
+ TestsSuite.class
+ .getProtectionDomain()
+ .getCodeSource()
+ .getLocation()
+ .toURI())
+ .getPath()
+ .toString();
+
List<Class<?>> results = findClasses(dir);
for (Class<?> cl : results) {
- if (cl.getName() == "ArgumentParser")
- continue;
+ if (cl.getName() == "ArgumentParser") continue;
suite.addTest(new junit.framework.JUnit4TestAdapter(cl));
}
} catch (ClassNotFoundException e) {
ZipInputStream zip = new ZipInputStream(new FileInputStream(directory));
for (ZipEntry entry = zip.getNextEntry(); entry != null; entry = zip.getNextEntry()) {
String name = entry.getName().toString();
- if (name.endsWith(".class") && !name.contains("$") && !name.contains("/")
- && !name.equals("TestsSuite.class") && !name.equals("OpenVinoTestRunner.class") && !name.equals("IETest.class")) {
- classes.add(Class.forName(name.substring(0, name.length() - ".class".length())));
+ if (name.endsWith(".class")
+ && !name.contains("$")
+ && !name.contains("/")
+ && !name.equals("TestsSuite.class")
+ && !name.equals("OpenVinoTestRunner.class")
+ && !name.equals("IETest.class")) {
+ classes.add(
+ Class.forName(name.substring(0, name.length() - ".class".length())));
}
}
- } catch(FileNotFoundException e) {
+ } catch (FileNotFoundException e) {
System.out.println("FileNotFoundException: " + e.getMessage());
- } catch(IOException e) {
+ } catch (IOException e) {
System.out.println("IOException: " + e.getMessage());
}
return classes;