1 // Copyright 2022 Google LLC
2 //
3 // This source code is licensed under the BSD-style license found in the
4 // LICENSE file in the root directory of this source tree.
5 
6 #include <algorithm>   // For std::generate, std::shuffle.
7 #include <array>       // For std::array.
8 #include <cstddef>     // For size_t.
9 #include <functional>  // For std::multiplies.
10 #include <memory>      // For std::unique_ptr.
11 #include <random>      // For std::random_device, std::mt19937, std::uniform_real_distribution.
12 #include <vector>      // For std::vector.
13 
14 #include <xnnpack.h>
15 #include <xnnpack/node-type.h>
16 #include <xnnpack/operator.h>
17 #include <xnnpack/subgraph.h>
18 
19 #include "subgraph-unary-tester.h"
20 #include <gtest/gtest.h>
21 
22 using StaticReshapeTestInt8 = UnaryTest<int8_t>;
23 using StaticReshapeTestUint8 = UnaryTest<uint8_t>;
24 using StaticReshapeTestF32= UnaryTest<float>;
25 
// Defining a static reshape over QS8 tensors must create exactly one
// correctly-wired node of the expected type and compute type.
TEST_F(StaticReshapeTestInt8, define)
{
  const int32_t zero_point = i8dist(rng);
  const float scale = scale_dist(rng);

  ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));

  // Create a subgraph with room for the two external values; RAII-guard it.
  xnn_subgraph_t subgraph = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_create_subgraph(/*external_value_ids=*/2, /*flags=*/0, &subgraph));
  std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);

  // External quantized input tensor.
  input_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success,
    xnn_define_quantized_tensor_value(
      subgraph, xnn_datatype_qint8, zero_point, scale, dims.size(), dims.data(), nullptr,
      /*external_id=*/0, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_INPUT, &input_id));
  ASSERT_NE(input_id, XNN_INVALID_NODE_ID);

  // External quantized output tensor, same shape as the input.
  output_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success,
    xnn_define_quantized_tensor_value(
      subgraph, xnn_datatype_qint8, zero_point, scale, dims.size(), dims.data(), nullptr,
      /*external_id=*/1, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_OUTPUT, &output_id));
  ASSERT_NE(output_id, XNN_INVALID_NODE_ID);

  ASSERT_EQ(
    xnn_status_success,
    xnn_define_static_reshape(subgraph, dims.size(), dims.data(), input_id, output_id, /*flags=*/0));

  // Exactly one node, wired input -> output, with QS8 compute type.
  ASSERT_EQ(subgraph->num_nodes, 1);
  const struct xnn_node* node = &subgraph->nodes[0];
  ASSERT_EQ(node->type, xnn_node_type_static_reshape);
  ASSERT_EQ(node->compute_type, xnn_compute_type_qs8);
  ASSERT_EQ(node->num_inputs, 1);
  ASSERT_EQ(node->inputs[0], input_id);
  ASSERT_EQ(node->num_outputs, 1);
  ASSERT_EQ(node->outputs[0], output_id);
  ASSERT_EQ(node->flags, 0);
}
63 
// Defining a static reshape over QU8 tensors must create exactly one
// correctly-wired node of the expected type and compute type.
TEST_F(StaticReshapeTestUint8, define)
{
  const int32_t zero_point = u8dist(rng);
  const float scale = scale_dist(rng);

  ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));

  // Create a subgraph with room for the two external values; RAII-guard it.
  xnn_subgraph_t subgraph = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_create_subgraph(/*external_value_ids=*/2, /*flags=*/0, &subgraph));
  std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);

  // External quantized input tensor.
  input_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success,
    xnn_define_quantized_tensor_value(
      subgraph, xnn_datatype_quint8, zero_point, scale, dims.size(), dims.data(), nullptr,
      /*external_id=*/0, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_INPUT, &input_id));
  ASSERT_NE(input_id, XNN_INVALID_NODE_ID);

  // External quantized output tensor, same shape as the input.
  output_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success,
    xnn_define_quantized_tensor_value(
      subgraph, xnn_datatype_quint8, zero_point, scale, dims.size(), dims.data(), nullptr,
      /*external_id=*/1, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_OUTPUT, &output_id));
  ASSERT_NE(output_id, XNN_INVALID_NODE_ID);

  ASSERT_EQ(
    xnn_status_success,
    xnn_define_static_reshape(subgraph, dims.size(), dims.data(), input_id, output_id, /*flags=*/0));

  // Exactly one node, wired input -> output, with QU8 compute type.
  ASSERT_EQ(subgraph->num_nodes, 1);
  const struct xnn_node* node = &subgraph->nodes[0];
  ASSERT_EQ(node->type, xnn_node_type_static_reshape);
  ASSERT_EQ(node->compute_type, xnn_compute_type_qu8);
  ASSERT_EQ(node->num_inputs, 1);
  ASSERT_EQ(node->inputs[0], input_id);
  ASSERT_EQ(node->num_outputs, 1);
  ASSERT_EQ(node->outputs[0], output_id);
  ASSERT_EQ(node->flags, 0);
}
101 
// Defining a static reshape over FP32 tensors must create exactly one
// correctly-wired node of the expected type and compute type.
TEST_F(StaticReshapeTestF32, define)
{
  ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));

  // Internal (non-external) tensors suffice here, so no external value IDs
  // are reserved; the subgraph is RAII-guarded.
  xnn_subgraph_t subgraph = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_create_subgraph(0, /*flags=*/0, &subgraph));
  std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);

  // FP32 input tensor.
  uint32_t input_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success,
    xnn_define_tensor_value(
      subgraph, xnn_datatype_fp32, dims.size(), dims.data(), nullptr, XNN_INVALID_VALUE_ID, /*flags=*/0, &input_id));
  ASSERT_NE(input_id, XNN_INVALID_NODE_ID);

  // FP32 output tensor, same shape as the input.
  uint32_t output_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success,
    xnn_define_tensor_value(
      subgraph, xnn_datatype_fp32, dims.size(), dims.data(), nullptr, XNN_INVALID_VALUE_ID, /*flags=*/0, &output_id));
  ASSERT_NE(output_id, XNN_INVALID_NODE_ID);

  ASSERT_EQ(xnn_status_success, xnn_define_static_reshape(subgraph, dims.size(), dims.data(), input_id, output_id, 0));

  // Exactly one node, wired input -> output, with FP32 compute type.
  ASSERT_EQ(subgraph->num_nodes, 1);
  const struct xnn_node* node = &subgraph->nodes[0];
  ASSERT_EQ(node->type, xnn_node_type_static_reshape);
  ASSERT_EQ(node->compute_type, xnn_compute_type_fp32);
  ASSERT_EQ(node->num_inputs, 1);
  ASSERT_EQ(node->inputs[0], input_id);
  ASSERT_EQ(node->num_outputs, 1);
  ASSERT_EQ(node->outputs[0], output_id);
  ASSERT_EQ(node->flags, 0);
}
135 
TEST_F(StaticReshapeTestInt8,matches_operator_api)136 TEST_F(StaticReshapeTestInt8, matches_operator_api)
137 {
138   const int32_t zero_point = i8dist(rng);
139   const float scale = scale_dist(rng);
140   std::generate(input.begin(), input.end(), [&]() { return i8dist(rng); });
141   std::fill(operator_output.begin(), operator_output.end(), INT8_C(0xA5));
142   std::fill(subgraph_output.begin(), subgraph_output.end(), INT8_C(0xA5));
143 
144   std::vector<size_t> output_dims = dims;
145   std::shuffle(output_dims.begin(), output_dims.end(), rng);
146 
147   ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));
148 
149   // Call operator API.
150   xnn_operator_t op = nullptr;
151   const xnn_status status =
152     xnn_create_copy_nc_x8(1, 1, 1, /*flags=*/0, &op);
153   if (status == xnn_status_unsupported_hardware) {
154     GTEST_SKIP();
155   }
156   ASSERT_EQ(xnn_status_success, status);
157   ASSERT_NE(nullptr, op);
158   std::unique_ptr<xnn_operator, decltype(&xnn_delete_operator)> auto_op(op, xnn_delete_operator);
159 
160   size_t batch_size = NumElements(dims);
161   ASSERT_EQ(
162     xnn_status_success,
163     xnn_setup_copy_nc_x8(op, batch_size, input.data(), operator_output.data(), /*threadpool=*/nullptr));
164   ASSERT_EQ(xnn_status_success, xnn_run_operator(op, /*threadpool=*/nullptr));
165 
166   // Call subgraph API.
167   xnn_subgraph_t subgraph = nullptr;
168   ASSERT_EQ(xnn_status_success, xnn_create_subgraph(/*external_value_ids=*/2, /*flags=*/0, &subgraph));
169   std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);
170   input_id = XNN_INVALID_NODE_ID;
171   ASSERT_EQ(
172     xnn_status_success, xnn_define_quantized_tensor_value(
173                           subgraph, xnn_datatype_qint8, zero_point, scale, dims.size(), dims.data(),
174                           nullptr, /*external_id=*/0, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_INPUT, &input_id));
175   ASSERT_NE(input_id, XNN_INVALID_NODE_ID);
176 
177   output_id = XNN_INVALID_NODE_ID;
178   ASSERT_EQ(
179     xnn_status_success, xnn_define_quantized_tensor_value(
180                           subgraph, xnn_datatype_qint8, zero_point, scale, output_dims.size(), output_dims.data(),
181                           nullptr, /*external_id=*/1, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_OUTPUT, &output_id));
182   ASSERT_NE(output_id, XNN_INVALID_NODE_ID);
183 
184   ASSERT_EQ(xnn_status_success, xnn_define_static_reshape(subgraph, output_dims.size(), output_dims.data(), input_id, output_id, /*flags=*/0));
185 
186   xnn_runtime_t runtime = nullptr;
187   ASSERT_EQ(xnn_status_success, xnn_create_runtime_v3(subgraph, nullptr, nullptr, /*flags=*/0, &runtime));
188   ASSERT_NE(nullptr, runtime);
189   std::unique_ptr<xnn_runtime, decltype(&xnn_delete_runtime)> auto_runtime(runtime, xnn_delete_runtime);
190 
191   std::array<xnn_external_value, 2> external = {
192     xnn_external_value{input_id, input.data()}, xnn_external_value{output_id, subgraph_output.data()}};
193   ASSERT_EQ(xnn_status_success, xnn_setup_runtime(runtime, external.size(), external.data()));
194   ASSERT_EQ(xnn_status_success, xnn_invoke_runtime(runtime));
195 
196   ASSERT_EQ(subgraph_output, operator_output);
197 }
198 
TEST_F(StaticReshapeTestUint8,matches_operator_api)199 TEST_F(StaticReshapeTestUint8, matches_operator_api)
200 {
201   const int32_t zero_point = u8dist(rng);
202   const float scale = scale_dist(rng);
203   std::generate(input.begin(), input.end(), [&]() { return u8dist(rng); });
204   std::fill(operator_output.begin(), operator_output.end(), UINT8_C(0xA5));
205   std::fill(subgraph_output.begin(), subgraph_output.end(), UINT8_C(0xA5));
206 
207   std::vector<size_t> output_dims = dims;
208   std::shuffle(output_dims.begin(), output_dims.end(), rng);
209 
210   ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));
211 
212   // Call operator API.
213   xnn_operator_t op = nullptr;
214   const xnn_status status =
215     xnn_create_copy_nc_x8(1, 1, 1, /*flags=*/0, &op);
216   if (status == xnn_status_unsupported_hardware) {
217     GTEST_SKIP();
218   }
219   ASSERT_EQ(xnn_status_success, status);
220   ASSERT_NE(nullptr, op);
221   std::unique_ptr<xnn_operator, decltype(&xnn_delete_operator)> auto_op(op, xnn_delete_operator);
222 
223   size_t batch_size = NumElements(dims);
224   ASSERT_EQ(
225     xnn_status_success,
226     xnn_setup_copy_nc_x8(op, batch_size, input.data(), operator_output.data(), /*threadpool=*/nullptr));
227   ASSERT_EQ(xnn_status_success, xnn_run_operator(op, /*threadpool=*/nullptr));
228 
229   // Call subgraph API.
230   xnn_subgraph_t subgraph = nullptr;
231   ASSERT_EQ(xnn_status_success, xnn_create_subgraph(/*external_value_ids=*/2, /*flags=*/0, &subgraph));
232   std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);
233   input_id = XNN_INVALID_NODE_ID;
234   ASSERT_EQ(
235     xnn_status_success, xnn_define_quantized_tensor_value(
236                           subgraph, xnn_datatype_quint8, zero_point, scale, dims.size(), dims.data(),
237                           nullptr, /*external_id=*/0, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_INPUT, &input_id));
238   ASSERT_NE(input_id, XNN_INVALID_NODE_ID);
239 
240   output_id = XNN_INVALID_NODE_ID;
241   ASSERT_EQ(
242     xnn_status_success, xnn_define_quantized_tensor_value(
243                           subgraph, xnn_datatype_quint8, zero_point, scale, output_dims.size(), output_dims.data(),
244                           nullptr, /*external_id=*/1, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_OUTPUT, &output_id));
245   ASSERT_NE(output_id, XNN_INVALID_NODE_ID);
246 
247   ASSERT_EQ(xnn_status_success, xnn_define_static_reshape(subgraph, output_dims.size(), output_dims.data(), input_id, output_id, /*flags=*/0));
248 
249   xnn_runtime_t runtime = nullptr;
250   ASSERT_EQ(xnn_status_success, xnn_create_runtime_v3(subgraph, nullptr, nullptr, /*flags=*/0, &runtime));
251   ASSERT_NE(nullptr, runtime);
252   std::unique_ptr<xnn_runtime, decltype(&xnn_delete_runtime)> auto_runtime(runtime, xnn_delete_runtime);
253 
254   std::array<xnn_external_value, 2> external = {
255     xnn_external_value{input_id, input.data()}, xnn_external_value{output_id, subgraph_output.data()}};
256   ASSERT_EQ(xnn_status_success, xnn_setup_runtime(runtime, external.size(), external.data()));
257   ASSERT_EQ(xnn_status_success, xnn_invoke_runtime(runtime));
258 
259   ASSERT_EQ(subgraph_output, operator_output);
260 }
261 
TEST_F(StaticReshapeTestF32,matches_operator_api)262 TEST_F(StaticReshapeTestF32, matches_operator_api)
263 {
264   std::generate(input.begin(), input.end(), [&]() { return f32dist(rng); });
265   std::fill(operator_output.begin(), operator_output.end(), nanf(""));
266   std::fill(subgraph_output.begin(), subgraph_output.end(), nanf(""));
267 
268   std::vector<size_t> output_dims = dims;
269   std::shuffle(output_dims.begin(), output_dims.end(), rng);
270 
271   ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));
272 
273   // Call operator API.
274   xnn_operator_t op = nullptr;
275   xnn_status status = xnn_create_copy_nc_x32(1, 1, 1, /*flags=*/0, &op);
276   if (status == xnn_status_unsupported_hardware) {
277     GTEST_SKIP();
278   }
279   ASSERT_EQ(xnn_status_success, status);
280   ASSERT_NE(nullptr, op);
281   std::unique_ptr<xnn_operator, decltype(&xnn_delete_operator)> auto_op(op, xnn_delete_operator);
282   size_t batch_size = NumElements(dims);
283   ASSERT_EQ(
284     xnn_status_success,
285     xnn_setup_copy_nc_x32(op, batch_size, input.data(), operator_output.data(), /*threadpool=*/nullptr));
286   ASSERT_EQ(xnn_status_success, xnn_run_operator(op, /*threadpool=*/nullptr));
287 
288   // Call subgraph API.
289   xnn_subgraph_t subgraph = nullptr;
290   ASSERT_EQ(xnn_status_success, xnn_create_subgraph(/*external_value_ids=*/2, /*flags=*/0, &subgraph));
291   ASSERT_NE(nullptr, subgraph);
292   std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);
293   uint32_t input_id = XNN_INVALID_NODE_ID;
294   ASSERT_EQ(
295     xnn_status_success, xnn_define_tensor_value(
296                           subgraph, xnn_datatype_fp32, dims.size(), dims.data(), nullptr, /*external_id=*/0,
297                           XNN_VALUE_FLAG_EXTERNAL_INPUT, &input_id));
298   ASSERT_NE(input_id, XNN_INVALID_NODE_ID);
299   uint32_t output_id = XNN_INVALID_NODE_ID;
300 
301   ASSERT_EQ(
302     xnn_status_success, xnn_define_tensor_value(
303                           subgraph, xnn_datatype_fp32, output_dims.size(), output_dims.data(), nullptr,
304                           /*external_id=*/1, XNN_VALUE_FLAG_EXTERNAL_OUTPUT, &output_id));
305   ASSERT_NE(output_id, XNN_INVALID_NODE_ID);
306   ASSERT_EQ(
307     xnn_status_success,
308     xnn_define_static_reshape(subgraph, output_dims.size(), output_dims.data(), input_id, output_id, /*flags=*/0));
309   xnn_runtime_t runtime = nullptr;
310   ASSERT_EQ(xnn_status_success, xnn_create_runtime_v3(subgraph, nullptr, nullptr, /*flags=*/0, &runtime));
311   ASSERT_NE(nullptr, runtime);
312   std::unique_ptr<xnn_runtime, decltype(&xnn_delete_runtime)> auto_runtime(runtime, xnn_delete_runtime);
313   std::array<xnn_external_value, 2> external = {
314     xnn_external_value{input_id, input.data()}, xnn_external_value{output_id, subgraph_output.data()}};
315   ASSERT_EQ(xnn_status_success, xnn_setup_runtime(runtime, external.size(), external.data()));
316   ASSERT_EQ(xnn_status_success, xnn_invoke_runtime(runtime));
317 
318   ASSERT_EQ(subgraph_output, operator_output);
319 }
320