2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // See LICENSE file in the project root for full license information.
11 #include "INetwork.hpp"
12 #include "TypesUtils.hpp"
/// Identifier returned by IRuntime::LoadNetwork() and used to refer to that
/// loaded network in subsequent calls (GetInputTensorInfo, EnqueueWorkload,
/// UnloadNetwork, ...).
using NetworkId = int;

// Forward declaration; the full interface is declared further down this file.
class IClTunedParameters;

/// Owning smart pointer for an IRuntime. The deleter is a plain function
/// pointer — presumably bound to IRuntime::Destroy() by Create(), whose
/// signature matches; verify against the factory implementation.
using IRuntimePtr = std::unique_ptr<IRuntime, void(*)(IRuntime* runtime)>;
/// Options used to configure an IRuntime instance at creation time.
struct CreationOptions
    /// Compute device the runtime targets by default for workload execution.
    Compute m_DefaultComputeDevice;
    /// If true, falls back to the CPU reference backend when a workload is not
    /// supported on m_DefaultComputeDevice (defaults to true, see constructor).
    bool m_UseCpuRefAsFallback;
    /// If set, uses the CL tuned parameters from the given object when executing CL workloads.
    /// It will also be updated with new tuned parameters if it is configured to do so.
    /// Non-owning: the caller keeps ownership of the pointed-to object.
    IClTunedParameters* m_ClTunedParameters;

    /// Constructs options targeting the given compute device, with CPU-reference
    /// fallback enabled and no CL tuned parameters.
    CreationOptions(Compute defaultComputeDevice)
        : m_DefaultComputeDevice(defaultComputeDevice)
        , m_UseCpuRefAsFallback(true)
        , m_ClTunedParameters(nullptr)
    /// Creates a runtime instance. The caller owns the returned raw pointer and
    /// must release it with Destroy() — prefer Create(), which automates this.
    static IRuntime* CreateRaw(const CreationOptions& options);

    /// Preferred factory: returns an owning IRuntimePtr whose function-pointer
    /// deleter releases the runtime automatically when it goes out of scope.
    static IRuntimePtr Create(const CreationOptions& options);

    /// Destroys a runtime obtained from CreateRaw().
    static void Destroy(IRuntime* runtime);
    /// Load a complete network into the IRuntime.
    /// @param [out] networkIdOut Unique identifier for the network is returned in this reference.
    /// @param [in] network Complete network to load into the IRuntime.
    /// The runtime takes ownership of the network once passed in.
    /// @return armnn::Status
    virtual Status LoadNetwork(NetworkId& networkIdOut, IOptimizedNetworkPtr network) = 0;

    /// Returns the TensorInfo for the input layer bound to layerId in the network
    /// identified by networkId.
    virtual TensorInfo GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const = 0;

    /// Returns the TensorInfo for the output layer bound to layerId in the network
    /// identified by networkId.
    virtual TensorInfo GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const = 0;

    /// Evaluates a network using input in inputTensors; outputs are filled into outputTensors.
    /// @param [in] networkId Identifier of the network to run, as returned by LoadNetwork().
    /// @return armnn::Status
    virtual Status EnqueueWorkload(NetworkId networkId,
                                   const InputTensors& inputTensors,
                                   const OutputTensors& outputTensors) = 0;

    /// Unload a network from the IRuntime.
    /// At the moment this only removes the network from the m_Impl->m_Network.
    /// This might need more work in the future to be AndroidNN compliant.
    /// @param [in] networkId Unique identifier for the network to be unloaded. Generated in LoadNetwork().
    /// @return armnn::Status
    virtual Status UnloadNetwork(NetworkId networkId) = 0;

    /// Returns the device specification describing this runtime's capabilities.
    virtual const DeviceSpec& GetDeviceSpec() const = 0;
/// Owning smart pointer for an IClTunedParameters. The deleter is a plain
/// function pointer — presumably bound to IClTunedParameters::Destroy() by
/// Create(), whose signature matches; verify against the factory implementation.
using IClTunedParametersPtr = std::unique_ptr<IClTunedParameters, void(*)(IClTunedParameters* params)>;
/// Manages a set of OpenCL parameters which have been tuned for maximum performance.
/// Pass an instance of this object to the IRuntime::Create() method (via IRuntime::CreationOptions) to use it
/// for all CL workload executions.
/// Can be created in two modes:
/// - In UseTunedParameters mode, the parameters stored in this object are used to execute CL workloads.
/// - In UpdateTunedParameters mode, additionally, whenever a CL workload is executed for the first time, the
///   optimum parameters will be found and stored in this object. WARNING - this tuning can be slow.
/// The parameters can be loaded from and saved to a file so that you can first run a slow initial read-write
/// execution, save the parameters for later, and then run fast read-only executions using the optimised parameters.
class IClTunedParameters
    /// Creates an IClTunedParameters with the given mode.
    /// The caller owns the returned raw pointer and must release it with
    /// Destroy() — prefer Create(), which automates this.
    static IClTunedParameters* CreateRaw(Mode mode);

    /// Preferred factory: returns an owning IClTunedParametersPtr whose deleter
    /// releases the object automatically when it goes out of scope.
    static IClTunedParametersPtr Create(Mode mode);

    /// Destroys an object obtained from CreateRaw().
    static void Destroy(IClTunedParameters* params);

    /// Loads an existing set of tuned parameters from the given file.
    /// If there is an error loading the file, an armnn::Exception is thrown.
    virtual void Load(const char* filename) = 0;

    /// Saves the current set of tuned parameters to the given file.
    /// If there is an error saving to the file, an armnn::Exception is thrown.
    virtual void Save(const char* filename) const = 0;
113 virtual ~IClTunedParameters() {};