3 * Copyright (C) 2020 Samsung Electronics
4 * Copyright (C) 2020 Dongju Chae <dongju.chae@samsung.com>
7 * @file ne-host-input-service.cc
9 * @brief Source of host input service
10 * @author Dongju Chae <dongju.chae@samsung.com>
11 * @bug No known bugs except for NYI items
16 #include "ne-inputservice.h"
17 #include "ne-thread-pool.h"
/** Singleton state: the lazily-created service instance and the
 *  std::once_flag that guards its one-time construction in getInstance (). */
21 std::unique_ptr<HostInputService> HostInputService::instance_;
22 std::once_flag HostInputService::once_flag_;
/**
 * @brief Get the process-wide HostInputService singleton.
 * @return reference to the single HostInputService instance
 * @note construction is guarded by once_flag_ via call_once, so concurrent
 *       first calls are safe; the instance lives until process exit.
 */
25 HostInputService::getInstance ()
27 call_once (once_flag_, []() {
28 instance_.reset (new HostInputService);
30 return *(instance_.get ());
/**
 * @brief submit an inference request, dispatching on the runtime type of data
 * @param[in] api the driver API
 * @param[in] id the request id
 * @param[in] model the target model (a null model is tolerated for Buffer data)
 * @param[in] data the input memory; expected to be a Buffer or a SegmentTable
 * @param[in] callback output callback
 * @return task id if no error, otherwise a negative errno
 * @note NOTE(review): each branch evaluates dynamic_cast twice (once for the
 *       test, once for the argument); could be hoisted into a local — confirm
 *       against the elided remainder of this function before changing.
 */
34 HostInputService::submit (const DriverAPI *api, int id,
35 const Model *model, HWmem *data, outputCallback callback)
40 if (dynamic_cast<Buffer *> (data)) {
41 /* empty model is possible */
42 return submit_buffer (api, id, model, dynamic_cast<Buffer *> (data), callback);
43 } else if (dynamic_cast<SegmentTable *> (data)) {
46 return submit_segt (api, id, model, dynamic_cast<SegmentTable *> (data), callback);
53 * @brief submit the request to the thread pool
54 * @param[in] api the driver API
55 * @param[in] id the request id
56 * @param[in] model the target model
57 * @param[in] buffer the target buffer
58 * @param[in] callback output callback
59 * @return task id if no error, otherwise a negative errno.
62 HostInputService::submit_buffer (const DriverAPI *api, int id,
63 const Model *model, Buffer *buffer, outputCallback callback)
/* bind invoke_buffer with all request state so a pool thread can run it later */
65 taskFunc func = std::bind (&HostInputService::invoke_buffer, this,
66 api, model, buffer, callback, id);
/* NOTE(review): ThreadTask is heap-allocated with new; presumably
   ThreadPool::enqueueTask takes ownership and frees it — confirm, else leak */
67 ThreadTask *task = new ThreadTask (id, func);
69 return ThreadPool::getInstance().enqueueTask (task);
73 * @brief submit the request to the thread pool
74 * @param[in] api the driver API
75 * @param[in] id the request id
76 * @param[in] model the target model
77 * @param[in] segt the target segment table
78 * @param[in] callback output callback
79 * @return task id if no error, otherwise a negative errno.
82 HostInputService::submit_segt (const DriverAPI *api, int id,
83 const Model *model, SegmentTable *segt, outputCallback callback)
/* bind invoke_segt with all request state so a pool thread can run it later */
85 taskFunc func = std::bind (&HostInputService::invoke_segt, this,
86 api, model, segt, callback, id);
/* NOTE(review): ThreadTask is heap-allocated with new; presumably
   ThreadPool::enqueueTask takes ownership and frees it — confirm, else leak */
87 ThreadTask *task = new ThreadTask (id, func);
89 return ThreadPool::getInstance().enqueueTask (task);
93 * @brief remove the submitted request (if possible)
94 * @param[in] id the request id to be removed
95 * @return 0 if no error, otherwise a negative errno
98 HostInputService::remove (int id)
/* delegate removal to the thread pool that holds the queued task */
100 return ThreadPool::getInstance().removeTask (id);
104 * @brief invoke the request using APIs
105 * @param[in] api the driver API
106 * @param[in] model the target model
107 * @param[in] buffer the target buffer
108 * @param[in] callback output callback
109 * @return 0 if no error, otherwise a negative errno
110 * @note this function should be used with TRIV driver!
113 HostInputService::invoke_buffer (const DriverAPI *api, const Model *model,
114 Buffer *buffer, outputCallback callback, int task_id)
116 input_config_t input_config;
117 device_state_t state;
/* bail out early when the device cannot accept work; control jumps to the
   callback path so waiters are still notified (see note at end) */
120 state = api->isReady();
121 if (state != device_state_t::STATE_READY) {
122 logerr (TAG, "device is not available to run inference %d\n", state);
123 goto handle_callback;
126 /** internal logic error */
127 assert (buffer != nullptr);
129 if (model != nullptr) {
130 /** consider NOP cases */
/* a model without program data has nothing to execute; skip the run */
131 if (model->getProgramData() == nullptr) {
133 goto handle_callback;
136 input_config.model_id = model->getInternalID();
/* no model supplied: fall back to model id 0 */
138 input_config.model_id = 0;
/* both activation offsets use the same dmabuf offset for a plain buffer */
141 input_config.dbuf_fd = buffer->getDmabuf ();
142 input_config.activation_offset_addr0 = buffer->getOffset ();
143 input_config.activation_offset_addr1 = buffer->getOffset ();
144 input_config.task_id = task_id;
146 /** run the inference with the input */
/* -ECANCELED means the request was removed on purpose; not worth logging */
147 ret = api->runInput (&input_config);
148 if (ret < 0 && ret != -ECANCELED)
149 logerr (TAG, "Failed to run the NPU inference: %d\n", ret);
152 /** should call the callback regardless of failure, to avoid deadlock */
153 if (callback != nullptr)
160 * @brief invoke the request using APIs
161 * @param[in] api the driver API
162 * @param[in] model the target model
163 * @param[in] segt the target segment table
164 * @param[in] callback output callback
165 * @return 0 if no error, otherwise a negative errno
166 * @note this function should be used with TRIV2 driver!
169 HostInputService::invoke_segt (const DriverAPI *api, const Model *model,
170 SegmentTable *segt, outputCallback callback, int task_id)
172 input_config_t input_config;
173 device_state_t state;
174 npuConstraint constraint;
/* bail out early when the device cannot accept work; control jumps to the
   callback path so waiters are still notified (see note at end) */
177 state = api->isReady();
178 if (state != device_state_t::STATE_READY) {
179 logerr (TAG, "device is not available to run inference %d\n", state);
180 goto handle_callback;
183 /** internal logic error */
184 assert (model != nullptr);
185 assert (segt != nullptr);
187 /** consider NOP cases */
/* a model without program data has nothing to execute; skip the run */
188 if (model->getProgramData() == nullptr) {
190 goto handle_callback;
/* describe the input via the segment table's dmabuf and segment count */
193 input_config.model_id = model->getInternalID();
194 input_config.dbuf_fd = segt->getDmabuf ();
195 input_config.num_segments = segt->getNumTotalSegments ();
197 /** set constraints */
198 constraint = model->getConstraint ();
199 input_config.timeout_ms = constraint.timeout_ms;
200 input_config.priority = constraint.priority;
202 /** input handling by CPU. host inputservice only supports CPU mode */
203 input_config.input_mode = INPUT_CPU;
205 /** output handling by CPU, host inputservice only supports either interrupt or polling */
206 if (constraint.notimode == NPU_POLLING) {
207 input_config.output_mode = OUTPUT_CPU_POLL;
208 } else { /** default mode is interrupt */
209 input_config.output_mode = OUTPUT_CPU_INTR;
212 input_config.task_id = task_id;
213 /** run the inference with the input */
/* -ECANCELED means the request was removed on purpose; not worth logging */
214 ret = api->runInput (&input_config);
215 if (ret < 0 && ret != -ECANCELED)
216 logerr (TAG, "Failed to run the NPU inference: %d\n", ret);
219 /** should call the callback regardless of failure, to avoid deadlock */
220 if (callback != nullptr)