20.02
|
#include <armnn/ArmNN.hpp>
#include <armnn/TypesUtils.hpp>
#include "CsvReader.hpp"
#include "../InferenceTest.hpp"
#include <Profiling.hpp>
#include <ResolveType.hpp>
#include <boost/algorithm/string/trim.hpp>
#include <boost/algorithm/string/split.hpp>
#include <boost/algorithm/string/classification.hpp>
#include <boost/program_options.hpp>
#include <boost/variant.hpp>
#include <iostream>
#include <fstream>
#include <functional>
#include <future>
#include <algorithm>
#include <iterator>
Go to the source code of this file.
Classes | |
struct | ExecuteNetworkParams |
Functions | |
template<typename TParser , typename TDataType > | |
int | MainImpl (const ExecuteNetworkParams &params, const std::shared_ptr< armnn::IRuntime > &runtime=nullptr)
int | RunTest (const std::string &format, const std::string &inputTensorShapesStr, const vector< armnn::BackendId > &computeDevices, const std::string &dynamicBackendsPath, const std::string &path, const std::string &inputNames, const std::string &inputTensorDataFilePaths, const std::string &inputTypes, bool quantizeInput, const std::string &outputTypes, const std::string &outputNames, const std::string &outputTensorFiles, bool dequantizeOuput, bool enableProfiling, bool enableFp16TurboMode, const double &thresholdTime, bool printIntermediate, const size_t subgraphId, bool enableLayerDetails=false, bool parseUnsupported=false, const std::shared_ptr< armnn::IRuntime > &runtime=nullptr) |
int | RunCsvTest (const armnnUtils::CsvRow &csvRow, const std::shared_ptr< armnn::IRuntime > &runtime, const bool enableProfiling, const bool enableFp16TurboMode, const double &thresholdTime, const bool printIntermediate, bool enableLayerDetails=false, bool parseUnuspported=false) |
Variables | |
bool | generateTensorData = true |
int MainImpl (const ExecuteNetworkParams & params,
              const std::shared_ptr< armnn::IRuntime > & runtime = nullptr)
Definition at line 391 of file NetworkExecutionUtils.hpp.
References ARMNN_LOG, InferenceModel< IParser, TDataType >::GetInputQuantizationParams(), InferenceModel< IParser, TDataType >::GetInputSize(), InferenceModel< IParser, TDataType >::GetOutputBindingInfos(), InferenceModel< IParser, TDataType >::GetOutputSize(), Params::m_ComputeDevices, ExecuteNetworkParams::m_ComputeDevices, ExecuteNetworkParams::m_DequantizeOutput, Params::m_DynamicBackendsPath, ExecuteNetworkParams::m_DynamicBackendsPath, Params::m_EnableFp16TurboMode, ExecuteNetworkParams::m_EnableFp16TurboMode, ExecuteNetworkParams::m_EnableLayerDetails, ExecuteNetworkParams::m_EnableProfiling, ExecuteNetworkParams::m_GenerateTensorData, Params::m_InputBindings, ExecuteNetworkParams::m_InputNames, Params::m_InputShapes, ExecuteNetworkParams::m_InputTensorDataFilePaths, ExecuteNetworkParams::m_InputTensorShapes, ExecuteNetworkParams::m_InputTypes, Params::m_IsModelBinary, ExecuteNetworkParams::m_IsModelBinary, Params::m_ModelPath, ExecuteNetworkParams::m_ModelPath, Params::m_OutputBindings, ExecuteNetworkParams::m_OutputNames, ExecuteNetworkParams::m_OutputTensorFiles, ExecuteNetworkParams::m_OutputTypes, Params::m_ParseUnsupported, ExecuteNetworkParams::m_ParseUnsupported, ExecuteNetworkParams::m_PrintIntermediate, Params::m_PrintIntermediateLayers, ExecuteNetworkParams::m_QuantizeInput, Params::m_SubgraphId, ExecuteNetworkParams::m_SubgraphId, ExecuteNetworkParams::m_ThresholdTime, Params::m_VisualizePostOptimizationModel, InferenceModel< IParser, TDataType >::Run(), and Exception::what().
int RunCsvTest (const armnnUtils::CsvRow & csvRow,
                const std::shared_ptr< armnn::IRuntime > & runtime,
                const bool enableProfiling,
                const bool enableFp16TurboMode,
                const double & thresholdTime,
                const bool printIntermediate,
                bool enableLayerDetails = false,
                bool parseUnuspported = false)
Definition at line 750 of file NetworkExecutionUtils.hpp.
References ARMNN_LOG, armnn::BackendRegistryInstance(), BackendRegistry::GetBackendIdsAsString(), armnn::IgnoreUnused(), RunTest(), and CsvRow::values.
Referenced by main().
int RunTest (const std::string & format,
             const std::string & inputTensorShapesStr,
             const vector< armnn::BackendId > & computeDevices,
             const std::string & dynamicBackendsPath,
             const std::string & path,
             const std::string & inputNames,
             const std::string & inputTensorDataFilePaths,
             const std::string & inputTypes,
             bool quantizeInput,
             const std::string & outputTypes,
             const std::string & outputNames,
             const std::string & outputTensorFiles,
             bool dequantizeOuput,
             bool enableProfiling,
             bool enableFp16TurboMode,
             const double & thresholdTime,
             bool printIntermediate,
             const size_t subgraphId,
             bool enableLayerDetails = false,
             bool parseUnsupported = false,
             const std::shared_ptr< armnn::IRuntime > & runtime = nullptr)
Definition at line 537 of file NetworkExecutionUtils.hpp.
References ARMNN_LOG, ExecuteNetworkParams::m_ComputeDevices, ExecuteNetworkParams::m_DequantizeOutput, ExecuteNetworkParams::m_DynamicBackendsPath, ExecuteNetworkParams::m_EnableFp16TurboMode, ExecuteNetworkParams::m_EnableLayerDetails, ExecuteNetworkParams::m_EnableProfiling, ExecuteNetworkParams::m_GenerateTensorData, ExecuteNetworkParams::m_InputNames, ExecuteNetworkParams::m_InputTensorDataFilePaths, ExecuteNetworkParams::m_InputTensorShapes, ExecuteNetworkParams::m_InputTypes, ExecuteNetworkParams::m_IsModelBinary, ExecuteNetworkParams::m_ModelPath, ExecuteNetworkParams::m_OutputNames, ExecuteNetworkParams::m_OutputTensorFiles, ExecuteNetworkParams::m_OutputTypes, ExecuteNetworkParams::m_ParseUnsupported, ExecuteNetworkParams::m_PrintIntermediate, ExecuteNetworkParams::m_QuantizeInput, ExecuteNetworkParams::m_SubgraphId, ExecuteNetworkParams::m_ThresholdTime, and Exception::what().
Referenced by BOOST_FIXTURE_TEST_CASE(), main(), and RunCsvTest().
bool generateTensorData = true
Definition at line 361 of file NetworkExecutionUtils.hpp.