**Ubuntu**: `/opt/trinity/share/npu-engine/testdata`
-## Example: apptest_tvn_triv2
-This application ([tvn_triv2.cc](/tests/apptests/tvn_triv2.cc)) executes TRIV2 models and compares outputs with their golden data.
+## Example: apptest_tvn_triv2_example
+This application ([tvn_triv2_example.cc](/tests/apptests/tvn_triv2_example.cc)) executes a given TRIV2 model using [libnpuhost.h](/include/host/libnpuhost.h).
Note that the testdata used here follows the NPU binary format ([npubinfmt.h](/include/common/npubinfmt.h)).
-We currently support three versions of metadata (i.e., npubinfmt_v1, v2, and v3), and TRIV2 works with npubinfmt v3 only.
+Also, TRIV2 currently works with npubinfmt v3 only.
-Let's assume you are testing the application in Ubuntu 16.04.
+Let's assume you are testing the application on Ubuntu 16.04/18.04.
```bash
$ sudo apt-get install npu-engine-example npu-engine-testdata
$ cd /opt/trinity
-$ ./bin/apptests/apptest_tvn_triv2 share/npu-engine/testdata/npubinfmt_v3/CONVE_I8_013
-...
-$ ./bin/apptests/apptest_tvn_triv2_bulk share/npu-engine/testdata/npubinfmt_v3
+$ ./bin/apptests/apptest_tvn_triv2_example share/npu-engine/testdata/npubinfmt_v3/MOBILENET_V1
...
```
-All tests should be passed if the library is successfully installed.
install_dir : join_paths(ne_bindir, 'apptests')
)
+## Apptests to guide the usage of libnpuhost APIs
+
+# Example apptest showing libnpuhost API usage; installed only when
+# b_install_dummy_apptests is enabled, alongside the other apptests.
+executable ('apptest_tvn_triv2_example',
+  'tvn_triv2_example.cc',
+  include_directories : ne_apptest_inc,
+  dependencies : ne_test_utils_dep,
+  link_with : ne_library_shared,
+  install : b_install_dummy_apptests,
+  install_rpath : ne_libdir,
+  install_dir : join_paths(ne_bindir, 'apptests')
+)
+
## Apptests to run the compiled tvn models
executable ('apptest_tvn_triv2_bulk',
--- /dev/null
+/**
+ * Proprietary
+ * Copyright (C) 2021 Samsung Electronics
+ * Copyright (C) 2021 Dongju Chae <dongju.chae@samsung.com>
+ */
+/**
+ * @file tvn_triv2_example.cc
+ * @date 14 Jan 2021
+ * @brief Example code to use libnpuhost.h with 2-TOPs TRIV2 device
+ * @author Dongju Chae <dongju.chae@samsung.com>
+ * @bug No known bugs except for NYI items
+ */
+
+#include <libnpuhost.h>
+#include <iostream>
+#include <fstream>
+
+/* Target device condition: TRIV2 connected via SoC-IP (see libnpuhost.h) */
+#define NPU_TYPE NPUCOND_TRIV2_CONN_SOCIP
+/* Filenames expected inside the model directory passed as argv[1] */
+#define MODEL_NAME "model.tvn"
+#define INPUT_NAME "input_fmap_0.bin"
+
+using namespace std;
+
+/**
+ * @brief Return the size in bytes of the given file.
+ * @param filename path of the file to measure
+ * @return file size as a stream position
+ *
+ * NOTE(review): if the file cannot be opened, tellg() on the failed
+ * stream yields pos_type(-1); callers assume the file exists -- confirm.
+ */
+ifstream::pos_type filesize (string filename)
+{
+  /* open at end ('ate') so tellg() directly reports the file size */
+  ifstream in(filename, ifstream::ate | ifstream::binary);
+  return in.tellg();
+}
+
+/**
+ * @brief apptest main
+ * @param argv[1] directory containing MODEL_NAME and INPUT_NAME
+ * @return 0 on success (or skip when no argument is given), -1 on failure
+ */
+int main (int argc, char **argv)
+{
+  /* treat a missing argument as "skip", not an error (exit code 0) */
+  if (argc != 2) {
+    cerr << "No model directory provided. Skip this test\n";
+    return 0;
+  }
+
+  string dir = argv[1];
+
+  /**
+   * 1) Open a NPU device using getNPUdeviceByTypeAny().
+   *
+   * Or, you can specify NPU affinity using getNPUdeviceByType().
+   */
+  npudev_h dev;
+  if (getNPUdeviceByTypeAny (&dev, NPU_TYPE, 2 /** NPU TOPS */) < 0) {
+    cerr << "Fail to open the NPU device\n";
+    return -1;
+  }
+
+  /**
+   * 2) Register the .tvn model using registerNPUmodel().
+   */
+  string modelpath = dir + "/" + MODEL_NAME;
+
+  /* pass the model by file path; libnpuhost reads the file itself */
+  generic_buffer modelfile;
+  modelfile.type = BUFFER_FILE;
+  modelfile.filepath = modelpath.c_str ();
+  modelfile.size = filesize (modelpath);
+
+  uint32_t modelid;
+  if (registerNPUmodel (dev, &modelfile, &modelid) < 0) {
+    cerr << "Fail to register model\n";
+    putNPUdevice (dev);
+    return -1;
+  }
+
+  /**
+   * 3) Set the data format using setNPU_dataInfo().
+   *
+   * Even if the format/type is not matched to the model,
+   * it's internally manipulated in runtime at the cost of performance.
+   * If you want to know the exact layout and format, parse the model's
+   * metadata using getNPUmodel_metadata().
+   */
+  tensors_data_info in, out;
+
+  /* here, we assume a single input/output tensor */
+  in.num_info = 1;
+  in.info[0].layout = DATA_LAYOUT_MODEL; /* model-expected layout */
+  in.info[0].type = DATA_TYPE_MODEL; /* model-expected type */
+  out.num_info = 1;
+  out.info[0].layout = DATA_LAYOUT_MODEL; /* model-expected layout */
+  out.info[0].type = DATA_TYPE_MODEL; /* model-expected type */
+
+  if (setNPU_dataInfo (dev, modelid, &in, &out) < 0) {
+    cerr << "Fail to set data info\n";
+    unregisterNPUmodel (dev, modelid);
+    putNPUdevice (dev);
+    return -1;
+  }
+
+  /**
+   * 4) (Optional) Set any constraints for the following inferences
+   */
+  npuConstraint constraint;
+  constraint.timeout_ms = 5000; /* presumably milliseconds, per field name -- confirm */
+  constraint.priority = NPU_PRIORITY_MID;
+  constraint.notimode = NPU_INTERRUPT; /* completion notification mode */
+
+  if (setNPU_constraint (dev, modelid, constraint) < 0) {
+    cerr << "Fail to set constraints\n";
+    unregisterNPUmodel (dev, modelid);
+    putNPUdevice (dev);
+    return -1;
+  }
+
+  /**
+   * 5) Run inference using runNPU_sync().
+   * Or, you can use runNPU_async() using a callback.
+   */
+  string inputpath = dir + "/" + INPUT_NAME;
+
+  input_buffers input;
+  input.num_buffers = 1;
+  input.bufs[0].type = BUFFER_FILE;
+  input.bufs[0].filepath = inputpath.c_str ();
+  input.bufs[0].size = filesize (inputpath);
+
+  /* NOTE(review): output is left uninitialized here; we rely on
+   * runNPU_sync() to fill it on success -- confirm the API guarantees
+   * this before reading num_buffers below. */
+  output_buffers output;
+  /* NOTE(review): any non-zero value is treated as failure here, unlike
+   * the '< 0' checks above -- confirm runNPU_sync() returns 0 on success. */
+  if (runNPU_sync(dev, modelid, &input, &output)) {
+    cerr << "Fail to run inference\n";
+    unregisterNPUmodel (dev, modelid);
+    putNPUdevice (dev);
+    return -1;
+  }
+
+  /**
+   * 6) Check the output buffer
+   *
+   * NOTE(review): addr is released with free(), which assumes libnpuhost
+   * allocates output buffers with malloc() -- confirm against the API docs.
+   */
+  for (uint32_t idx = 0; idx < output.num_buffers; idx++) {
+    generic_buffer * buffer = &output.bufs[idx];
+    if (buffer->addr != NULL) {
+      /* DO SOMETHING */
+      free (buffer->addr);
+    }
+  }
+
+  /**
+   * 7) Unregister model and clean up the instance
+   */
+  unregisterNPUmodel (dev, modelid);
+  putNPUdevice (dev);
+
+  return 0;
+}