* tensor_t inputTensor;
* inputTensor.dimInfo.push_back(dim);
* inputTensor.data = (void*)pImageData;
*
* // Do something with inputTensor
+ * @endcode
* @since_tizen 5.5
/**
* @brief Inference engine backend configuration
*
- * This structure should be configured before InferenceEngineCommon object is
- * created and then passed to InferenceEngineCommon's constructor.
+ * @details This structure should be configured before an InferenceEngineCommon
+ * object is created and then passed to InferenceEngineCommon's constructor, as
+ * shown in the sketch below.
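+ *
+ * A minimal usage sketch. The member names (@c backend_name, @c target_devices)
+ * and the constructor signature are assumptions; the struct body is not part of
+ * this change.
+ * @code
+ * inference_engine_config config;
+ * config.backend_name = "tflite";                // assumed member: backend identifier
+ * config.target_devices = INFERENCE_TARGET_CPU;  // assumed member and enum value
+ *
+ * InferenceEngineCommon *engine = new InferenceEngineCommon(&config);
+ * @endcode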
*
* @since_tizen 6.0
*/
} inference_engine_config;
/**
- * @brief tensor buffer structure
+ * @brief Tensor buffer structure
*
- * This buffer contains actual tensor data so type-casting is required
- * according to a tensor data type.
+ * @details This buffer contains the actual tensor data, so type-casting is required
+ * according to the tensor data type, as shown in the sketch below.
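+ *
+ * A minimal sketch of the type-casting described above. The member names
+ * (@c buffer, @c data_type) and the enum value are assumptions; the struct body
+ * is not part of this change.
+ * @code
+ * inference_engine_tensor_buffer tensor_buffer;
+ * // ... tensor_buffer is filled by the backend engine or the upper framework ...
+ *
+ * if (tensor_buffer.data_type == INFERENCE_TENSOR_DATA_TYPE_FLOAT32) {
+ *     float *data = (float *)tensor_buffer.buffer;
+ *     // access the tensor elements as float values through 'data'
+ * }
+ * @endcode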
*
* @since_tizen 6.0
*/
} inference_engine_tensor_buffer;
/**
- * @brief tensor information structure
+ * @brief Tensor information structure
*
- * This structure corresponding to a tensor contains below tensor information,
- * - a name of a given layer
- * - a tensor shape of the layer
- * - a tensor type of the layer
- * - a tensor element size of the layer.
+ * @details This structure describes a tensor and contains the following information:
+ * - the name of a given layer
+ * - the tensor shape of the layer
+ * - the tensor type of the layer
+ * - the tensor element size of the layer
*
- * Caution. Tensor element size is not in-memory buffer size in bytes so based on a given tensor element size,
- * upper framework should allocate actual tensor buffer according to tensor data types (i.e., uint8, float32...)
+ * @remarks The tensor element size is not the in-memory buffer size in bytes. Based on
+ * the given element size, the upper framework should allocate the actual tensor buffer
+ * according to the tensor data type (e.g., uint8, float32), as shown in the sketch below.
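+ *
+ * A minimal allocation sketch following the remark above. The member names
+ * (@c size, @c data_type) and the enum value are assumptions; the struct body
+ * is not part of this change.
+ * @code
+ * inference_engine_tensor_info tensor_info;
+ * // ... tensor_info is filled by the backend engine ...
+ *
+ * size_t buffer_size = tensor_info.size;  // element count, not bytes
+ * if (tensor_info.data_type == INFERENCE_TENSOR_DATA_TYPE_FLOAT32)
+ *     buffer_size *= sizeof(float);       // convert the element count to bytes
+ *
+ * void *buffer = malloc(buffer_size);     // raw buffer sized in bytes
+ * @endcode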
*
* @since_tizen 6.0
*/
} inference_engine_tensor_info;
/**
- * @brief a layer property structure
+ * @brief A layer property structure
*
- * This structure is used to get/set information to one more tensors from/to a backend engine.
- * - layer names of input or output layer.
- * - information of tensors.
+ * @details This structure is used to get or set information on one or more tensors
+ * from or to a backend engine (see the sketch below). It contains:
+ * - the names of the input or output layers
+ * - the tensor information for each layer
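+ *
+ * A minimal sketch of filling the property for an input layer. The member names
+ * (@c layer_names, @c tensor_infos) are assumptions; the struct body is not part
+ * of this change.
+ * @code
+ * inference_engine_layer_property property;
+ * property.layer_names.push_back("input_tensor");      // assumed member
+ * property.tensor_infos.push_back(input_tensor_info);  // assumed member
+ *
+ * // hand the property to the backend engine through its setter,
+ * // which is not part of this change
+ * @endcode
+ *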
* @since_tizen 6.0
*/
typedef struct _inference_engine_layer_property {
} inference_engine_layer_property;
/**
- * @brief a capacity structure to a backend engine.
+ * @brief A capacity structure for a backend engine
*
- * This structure is used to get information such as what features and
- * constraints a given backend engine has, and it contains below information,
- * - device list which is able to compute operations.
- * - tensor shape information a given backend engine supports for.
- * - neural network models a given backend engine supports for.
+ * @details This structure is used to get information on the features and constraints
+ * of a given backend engine (see the sketch below). It contains the following information:
+ * - a list of devices that can compute operations
+ * - the tensor shape information the backend engine supports
+ * - the neural network models the backend engine supports
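+ *
+ * A minimal query sketch. The getter name, the member name
+ * (@c supported_accel_devices) and the enum values are assumptions; they are not
+ * part of this change.
+ * @code
+ * inference_engine_capacity capacity;
+ * engine->GetBackendCapacity(&capacity);  // assumed getter on InferenceEngineCommon
+ *
+ * if (capacity.supported_accel_devices & INFERENCE_TARGET_GPU)
+ *     config.target_devices = INFERENCE_TARGET_GPU;  // prefer the GPU when supported
+ * @endcode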
*
* @since_tizen 6.0
*/