switch (tensorPtr->dataType())
{
+ case DataType_QAsymmS8:
+ type = armnn::DataType::QAsymmS8;
+ break;
case DataType_QuantisedAsymm8:
case DataType_QAsymmU8:
type = armnn::DataType::QAsymmU8;
break;
inputFileStream.close();
armnn::QuantizerOptions quantizerOptions;
- quantizerOptions.m_ActivationFormat = cmdline.GetQuantizationScheme() == "QSymm16"
- ? armnn::DataType::QSymmS16
- : armnn::DataType::QAsymmU8;
+
+ if (cmdline.GetQuantizationScheme() == "QAsymmS8")
+ {
+ quantizerOptions.m_ActivationFormat = armnn::DataType::QAsymmS8;
+ }
+ else if (cmdline.GetQuantizationScheme() == "QSymmS16")
+ {
+ quantizerOptions.m_ActivationFormat = armnn::DataType::QSymmS16;
+ }
+ else
+ {
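+         // Default scheme, "QAsymmU8" (also the command-line default below).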
+ quantizerOptions.m_ActivationFormat = armnn::DataType::QAsymmU8;
+ }
quantizerOptions.m_PreserveType = cmdline.HasPreservedDataType();
return false;
}
- std::vector<std::string> supportedSchemes = {
- "QAsymm8",
+ std::vector<std::string> supportedSchemes =
+ {
+ "QAsymmS8",
+ "QAsymmU8",
"QSymm16"
};
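+ // Example invocation with the new scheme (binary and file names are
+ // illustrative only; the flags match the options defined below):
+ //   ArmnnQuantizer -f model_fp32.armnn -s QAsymmS8 -p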
("help,h", "Display help messages")
("infile,f", po::value<std::string>(&m_InputFileName)->required(),
"Input file containing float 32 ArmNN Input Graph")
- ("scheme,s", po::value<std::string>(&m_QuantizationScheme)->default_value("QAsymm8"),
- "Quantization scheme, \"QAsymm8\" or \"QSymm16\", default value QAsymm8")
+ ("scheme,s", po::value<std::string>(&m_QuantizationScheme)->default_value("QAsymmU8"),
+ "Quantization scheme,"
+ " \"QAsymmU8\" or \"QAsymmS8\" or \"QSymm16\","
+ " default value QAsymmU8")
("csvfile,c", po::value<std::string>(&m_CsvFileName)->default_value(""),
"CSV file containing paths for RAW input tensors")
("preserve-data-type,p", po::bool_switch(&m_PreserveDataType)->default_value(false),
Boolean = 4,
QuantisedSymm16 = 5, // deprecated
QAsymmU8 = 6,
- QSymmS16 = 7
+ QSymmS16 = 7,
+ QAsymmS8 = 8
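+     // QAsymmS8 is appended with a new value; existing entries (including the
+     // deprecated ones) keep their numbers so older serialized graphs still load.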
}
enum DataLayout : byte {
return armnnSerializer::DataType::DataType_Signed32;
case armnn::DataType::QSymmS16:
return armnnSerializer::DataType::DataType_QSymmS16;
+ case armnn::DataType::QAsymmS8:
+ return armnnSerializer::DataType::DataType_QAsymmS8;
case armnn::DataType::QAsymmU8:
return armnnSerializer::DataType::DataType_QAsymmU8;
case armnn::DataType::Boolean:
{
DataType::Float32,
DataType::Float16,
+ DataType::QAsymmS8,
DataType::QAsymmU8,
DataType::QSymmS16
};
std::vector<DataType> supportedTypes =
{
DataType::Float32,
- DataType::QAsymmU8,
DataType::QAsymmS8,
+ DataType::QAsymmU8,
DataType::QSymmS16,
DataType::QSymmS8,
DataType::Float16
case armnn::DataType::Float16:
case armnn::DataType::Float32:
return weightsType;
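+     // As with QAsymmU8 below, 8-bit quantized layers carry their bias as
+     // Signed32 (conventionally scaled by input_scale * weights_scale).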
+ case armnn::DataType::QAsymmS8:
+ return armnn::DataType::Signed32;
case armnn::DataType::QAsymmU8:
return armnn::DataType::Signed32;
case armnn::DataType::QSymmS16:
bool supported = true;
// Define supported types.
- std::array<DataType,4> supportedTypes =
+ std::array<DataType,5> supportedTypes =
{
DataType::Float32,
DataType::Float16,
DataType::QAsymmU8,
+ DataType::QAsymmS8,
DataType::QSymmS16
};
supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
"Reference Fully Connected: weights type not supported.");
- supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
- "Reference Fully Connected: input and weight types mismatched.");
+ ARMNN_NO_DEPRECATE_WARN_BEGIN
+ std::array<DataType, 3> supportedWeightTypes =
+ {
+ DataType::QAsymmU8,
+ DataType::QSymmS8,
+ DataType::QuantizedSymm8PerAxis // deprecated
+ };
+ ARMNN_NO_DEPRECATE_WARN_END
+
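+     // Quantized 8-bit inputs may legitimately pair with signed 8-bit weights
+     // (including the per-axis variant), so the strict input/weights type match
+     // is only enforced on the non-quantized path below.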
+ if (IsQuantized8BitType(input.GetDataType()))
+ {
+ supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
+ "Reference Fully Connected: weights type not supported for quantized input.");
+ }
+ else
+ {
+ supported &= CheckSupportRule(TypeAnyOf(weights, supportedTypes), reasonIfUnsupported,
+ "Reference Fully Connected: weights is not a supported type.");
+
+ supported &= CheckSupportRule(TypesAreEqual(input, weights), reasonIfUnsupported,
+ "Reference Fully Connected: input and weights types mismatched.");
+ }
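+     // For reference, IsQuantized8BitType() is assumed to cover the 8-bit
+     // quantized types, roughly:
+     //   constexpr bool IsQuantized8BitType(DataType type)
+     //   {
+     //       return type == DataType::QAsymmU8 || type == DataType::QAsymmS8 ||
+     //              type == DataType::QSymmS8  || type == DataType::QuantizedSymm8PerAxis;
+     //   }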
if (descriptor.m_BiasEnabled)
{