//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "DequantizeTestImpl.hpp"

#include <ResolveType.hpp>

#include <backendsCommon/test/TensorCopyUtils.hpp>
#include <backendsCommon/test/WorkloadTestUtils.hpp>

#include <test/TensorHelpers.hpp>

namespace
{

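// Builds a Dequantize workload for the given input/output tensor infos, runs it through the
// supplied workload factory, and returns the actual output alongside the expected reference output.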
template<typename T, std::size_t Dim, typename T1=float>
LayerTestResult<T1, Dim> DequantizeTestImpl(
        armnn::IWorkloadFactory& workloadFactory,
        const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
        const armnn::TensorInfo& inputTensorInfo,
        const armnn::TensorInfo& outputTensorInfo,
        const std::vector<T>& inputData,
        const std::vector<T1>& expectedOutputData,
        armnn::DequantizeQueueDescriptor descriptor)
{
    IgnoreUnused(memoryManager);
    boost::multi_array<T, Dim> input = MakeTensor<T, Dim>(inputTensorInfo, inputData);

    LayerTestResult<T1, Dim> ret(outputTensorInfo);
    ret.outputExpected = MakeTensor<T1, Dim>(outputTensorInfo, expectedOutputData);

    ARMNN_NO_DEPRECATE_WARN_BEGIN
    std::unique_ptr<armnn::ITensorHandle> inputHandle = workloadFactory.CreateTensorHandle(inputTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);
    ARMNN_NO_DEPRECATE_WARN_END

    armnn::WorkloadInfo info;
    AddInputToWorkload(descriptor, info, inputTensorInfo, inputHandle.get());
    AddOutputToWorkload(descriptor, info, outputTensorInfo, outputHandle.get());

    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateDequantize(descriptor, info);

    inputHandle->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle.get(), input.data());

    ExecuteWorkload(*workload, memoryManager);

    CopyDataFromITensorHandle(ret.output.data(), outputHandle.get());

    return ret;
}

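// Dequantizes a 1x2x2x3 tensor quantized with scale 0.5 and offset 0: inputs 2, 4, ..., 24
// are expected to map to 1, 2, ..., 12 in the output type (Float32 by default).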
template <armnn::DataType ArmnnInputType,
          armnn::DataType ArmnnOutputType=armnn::DataType::Float32,
          typename OutType=armnn::ResolveType<ArmnnOutputType>>
LayerTestResult<OutType, 4> DequantizeSimpleTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    using T = armnn::ResolveType<ArmnnInputType>;

    armnn::DequantizeQueueDescriptor desc;

    const armnn::TensorInfo inputTensorInfo({1, 2, 2, 3}, ArmnnInputType, 0.5f, 0);
    const armnn::TensorInfo outputTensorInfo({1, 2, 2, 3}, ArmnnOutputType);

    std::vector<T> inputData = std::vector<T>(
    {
         2,  4,  6,
         8, 10, 12,
        14, 16, 18,
        20, 22, 24,
    });

    std::vector<OutType> expectedOutputData;
    for (OutType i = OutType(1); i <= OutType(12); ++i)
    {
        expectedOutputData.push_back(i);
    }

    return DequantizeTestImpl<T, 4, OutType>(workloadFactory,
                                             memoryManager,
                                             inputTensorInfo,
                                             outputTensorInfo,
                                             inputData,
                                             expectedOutputData,
                                             desc);
}

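// Same 1x2x2x3 case but with a non-zero quantization offset (scale 0.5, offset 1):
// inputs 3, 5, ..., 25 dequantize to 1.0f, 2.0f, ..., 12.0f.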
template <armnn::DataType ArmnnInputType>
LayerTestResult<float, 4> DequantizeOffsetTest(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    using T = armnn::ResolveType<ArmnnInputType>;

    armnn::DequantizeQueueDescriptor desc;

    const armnn::TensorInfo inputTensorInfo({1, 2, 2, 3}, ArmnnInputType, 0.5f, 1);
    const armnn::TensorInfo outputTensorInfo({1, 2, 2, 3}, armnn::DataType::Float32);

    std::vector<T> inputData = std::vector<T>(
    {
         3,  5,  7,
         9, 11, 13,
        15, 17, 19,
        21, 23, 25,
    });

    std::vector<float> expectedOutputData = std::vector<float>(
    {
        1.0f,   2.0f,  3.0f,
        4.0f,   5.0f,  6.0f,
        7.0f,   8.0f,  9.0f,
        10.0f, 11.0f, 12.0f,
    });

    return DequantizeTestImpl<T, 4>(workloadFactory,
                                    memoryManager,
                                    inputTensorInfo,
                                    outputTensorInfo,
                                    inputData,
                                    expectedOutputData,
                                    desc);
}

} // anonymous namespace

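// Public test entry points, one per supported quantized input (and output) data type.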
LayerTestResult<float, 4> DequantizeSimpleUint8Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    return DequantizeSimpleTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}

LayerTestResult<float, 4> DequantizeOffsetUint8Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    return DequantizeOffsetTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}

LayerTestResult<float, 4> DequantizeSimpleAsymmInt8Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    return DequantizeSimpleTest<armnn::DataType::QAsymmS8>(workloadFactory, memoryManager);
}

LayerTestResult<float, 4> DequantizeOffsetAsymmInt8Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    return DequantizeOffsetTest<armnn::DataType::QAsymmS8>(workloadFactory, memoryManager);
}

LayerTestResult<float, 4> DequantizeSimpleInt8Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    return DequantizeSimpleTest<armnn::DataType::QSymmS8>(workloadFactory, memoryManager);
}

LayerTestResult<float, 4> DequantizeSimpleInt16Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    return DequantizeSimpleTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}

LayerTestResult<armnn::Half, 4> DequantizeSimpleUint8ToFp16Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    return DequantizeSimpleTest<armnn::DataType::QAsymmU8, armnn::DataType::Float16>(workloadFactory,
                                                                                     memoryManager);
}

LayerTestResult<armnn::Half, 4> DequantizeSimpleInt8ToFp16Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    return DequantizeSimpleTest<armnn::DataType::QSymmS8, armnn::DataType::Float16>(workloadFactory, memoryManager);
}

LayerTestResult<armnn::Half, 4> DequantizeSimpleInt16ToFp16Test(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
    return DequantizeSimpleTest<armnn::DataType::QSymmS16, armnn::DataType::Float16>(workloadFactory,
                                                                                     memoryManager);
}