return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
}
- // TODO. nnstreamer needs fixed dimention with 4 for nntrainer tensor filter. Why??
- std::vector<unsigned int> indim(4, 1);
+ // NNStreamer uses a fixed tensor rank of ML_TENSOR_RANK_LIMIT (16).
+ std::vector<unsigned int> indim(ML_TENSOR_RANK_LIMIT, 1);
LOGI("Input tensor(%zu) shape:", layer_idx);
for (auto& output : mDesignated_outputs) {
inference_engine_tensor_info tensor_info;
ml_tensor_type_e out_type;
- unsigned int out_dim[MAX_TENSOR_DIMENSION_SIZE];
+ unsigned int out_dim[ML_TENSOR_RANK_LIMIT];
size_t out_size = 1;
ret = ml_tensors_info_get_tensor_type(mOutputInfoHandle, output.second, &out_type);