// Copyright © 2017 Arm Ltd. All rights reserved.
// See LICENSE file in the project root for full license information.
12 // Make a workload of the specified WorkloadType.
13 template<typename WorkloadType>
14 struct MakeWorkloadForType
16 template<typename QueueDescriptorType, typename... Args>
17 static std::unique_ptr<WorkloadType> Func(const QueueDescriptorType& descriptor,
18 const WorkloadInfo& info,
21 return std::make_unique<WorkloadType>(descriptor, info, std::forward<Args>(args)...);
25 // Specialization for void workload type used for unsupported workloads.
27 struct MakeWorkloadForType<NullWorkload>
29 template<typename QueueDescriptorType, typename... Args>
30 static std::unique_ptr<NullWorkload> Func(const QueueDescriptorType& descriptor,
31 const WorkloadInfo& info,
38 // Makes a workload for one the specified types based on the data type requirements of the tensorinfo.
39 // Specify type void as the WorkloadType for unsupported DataType/WorkloadType combos.
40 template <typename Float16Workload, typename Float32Workload, typename Uint8Workload, typename QueueDescriptorType,
42 std::unique_ptr<IWorkload> MakeWorkload(const QueueDescriptorType& descriptor, const WorkloadInfo& info, Args&&... args)
44 const DataType dataType = !info.m_InputTensorInfos.empty() ?
45 info.m_InputTensorInfos[0].GetDataType()
46 : info.m_OutputTensorInfos[0].GetDataType();
48 BOOST_ASSERT(info.m_InputTensorInfos.empty() || info.m_OutputTensorInfos.empty()
49 || info.m_InputTensorInfos[0].GetDataType() == info.m_OutputTensorInfos[0].GetDataType());
53 case DataType::Float16:
54 return MakeWorkloadForType<Float16Workload>::Func(descriptor, info, std::forward<Args>(args)...);
55 case DataType::Float32:
56 return MakeWorkloadForType<Float32Workload>::Func(descriptor, info, std::forward<Args>(args)...);
57 case DataType::QuantisedAsymm8:
58 return MakeWorkloadForType<Uint8Workload>::Func(descriptor, info, std::forward<Args>(args)...);
60 BOOST_ASSERT_MSG(false, "Unknown DataType.");
65 // Makes a workload for one the specified types based on the data type requirements of the tensorinfo.
66 // Calling this method is the equivalent of calling the three typed MakeWorkload method with <FloatWorkload,
67 // FloatWorkload, Uint8Workload>.
68 // Specify type void as the WorkloadType for unsupported DataType/WorkloadType combos.
69 template <typename FloatWorkload, typename Uint8Workload, typename QueueDescriptorType, typename... Args>
70 std::unique_ptr<IWorkload> MakeWorkload(const QueueDescriptorType& descriptor, const WorkloadInfo& info, Args&&... args)
72 return MakeWorkload<FloatWorkload, FloatWorkload, Uint8Workload>(descriptor, info,
73 std::forward<Args>(args)...);