This patch changes the internal tensor size limit in MachineLearning.Inference.
The Tizen Native ML API's maximum number of tensors has been changed to 256.
Signed-off-by: Yelin Jeong <yelini.jeong@samsung.com>
/// <summary>
/// The maximum number of other/tensor instances that other/tensors may have.
/// </summary>
- internal const int SizeLimit = 16;
+ internal const int SizeLimit = 256;
/// <summary>
/// Unknown Type of Tensor information. It is internally used for error check.
}
/// <summary>
- /// Add a Tensor information to the TensorsInfo instance. Note that we support up to 16 tensors in TensorsInfo.
+ /// Add a Tensor information to the TensorsInfo instance. Note that we support up to 256 tensors in TensorsInfo.
/// </summary>
/// <param name="type">Data element type of Tensor.</param>
- /// <param name="dimension">Dimension of Tensor. Note that we support up to 4th ranks.</param>
+ /// <param name="dimension">Dimension of Tensor. Note that we support up to 16th ranks.</param>
/// <feature>http://tizen.org/feature/machine_learning.inference</feature>
/// <exception cref="IndexOutOfRangeException">Thrown when the number of Tensor already exceeds the size limits (i.e. Tensor.SizeLimit)</exception>
/// <exception cref="ArgumentException">Thrown when the method failed due to an invalid parameter.</exception>