// Copyright 2022 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.

#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <memory>
#include <random>

#include <xnnpack.h>
#include <xnnpack/node-type.h>
#include <xnnpack/operator.h>
#include <xnnpack/subgraph.h>

#include "subgraph-unary-tester.h"
#include <gtest/gtest.h>

using SigmoidTestQS8 = UnaryTest<int8_t>;
using SigmoidTestQU8 = UnaryTest<uint8_t>;
using SigmoidTestF32 = UnaryTest<float>;

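// Checks that defining a QS8 Sigmoid node records the expected node type, compute type,
// input/output IDs, and flags in the subgraph.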
TEST_F(SigmoidTestQS8, define)
{
  const int32_t input_zero_point = i8dist(rng);
  const float input_scale = scale_dist(rng);
  // Sigmoid output lies in [0, 1), so the QS8 output quantization is fixed at zero point INT8_MIN and scale 2^-8.
  const int32_t output_zero_point = INT8_MIN;
  const float output_scale = 0x1.0p-8f;

  ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));

  xnn_subgraph_t subgraph = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_create_subgraph(/*external_value_ids=*/2, /*flags=*/0, &subgraph));
  std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);

  input_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_quantized_tensor_value(
                          subgraph, xnn_datatype_qint8, input_zero_point, input_scale, dims.size(), dims.data(),
                          nullptr, 0, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_INPUT, &input_id));
  ASSERT_NE(input_id, XNN_INVALID_NODE_ID);

  output_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_quantized_tensor_value(
                          subgraph, xnn_datatype_qint8, output_zero_point, output_scale, dims.size(), dims.data(),
                          nullptr, 1, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_OUTPUT, &output_id));
  ASSERT_NE(output_id, XNN_INVALID_NODE_ID);

  ASSERT_EQ(xnn_status_success, xnn_define_sigmoid(subgraph, input_id, output_id, /*flags=*/0));

  ASSERT_EQ(subgraph->num_nodes, 1);
  const struct xnn_node* node = &subgraph->nodes[0];
  ASSERT_EQ(node->type, xnn_node_type_sigmoid);
  ASSERT_EQ(node->compute_type, xnn_compute_type_qs8);
  ASSERT_EQ(node->num_inputs, 1);
  ASSERT_EQ(node->inputs[0], input_id);
  ASSERT_EQ(node->num_outputs, 1);
  ASSERT_EQ(node->outputs[0], output_id);
  ASSERT_EQ(node->flags, 0);
}

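// Same definition check as above, but for a QU8 (unsigned quantized) Sigmoid node.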
TEST_F(SigmoidTestQU8, define)
{
  const int32_t input_zero_point = u8dist(rng);
  const float input_scale = scale_dist(rng);
  const int32_t output_zero_point = 0;
  const float output_scale = 0x1.0p-8f;

  ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));

  xnn_subgraph_t subgraph = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_create_subgraph(/*external_value_ids=*/2, /*flags=*/0, &subgraph));
  std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);

  input_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_quantized_tensor_value(
                          subgraph, xnn_datatype_quint8, input_zero_point, input_scale, dims.size(), dims.data(),
                          nullptr, 0, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_INPUT, &input_id));
  ASSERT_NE(input_id, XNN_INVALID_NODE_ID);

  output_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_quantized_tensor_value(
                          subgraph, xnn_datatype_quint8, output_zero_point, output_scale, dims.size(), dims.data(),
                          nullptr, 1, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_OUTPUT, &output_id));
  ASSERT_NE(output_id, XNN_INVALID_NODE_ID);

  ASSERT_EQ(xnn_status_success, xnn_define_sigmoid(subgraph, input_id, output_id, /*flags=*/0));

  ASSERT_EQ(subgraph->num_nodes, 1);
  const struct xnn_node* node = &subgraph->nodes[0];
  ASSERT_EQ(node->type, xnn_node_type_sigmoid);
  ASSERT_EQ(node->compute_type, xnn_compute_type_qu8);
  ASSERT_EQ(node->num_inputs, 1);
  ASSERT_EQ(node->inputs[0], input_id);
  ASSERT_EQ(node->num_outputs, 1);
  ASSERT_EQ(node->outputs[0], output_id);
  ASSERT_EQ(node->flags, 0);
}

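// Same definition check as above, but for an F32 Sigmoid node using non-quantized tensor values.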
TEST_F(SigmoidTestF32, define)
{
  std::uniform_real_distribution<float> f32dist(-255.0f, 255.0f);
  std::generate(input.begin(), input.end(), [&]() { return f32dist(rng); });

  ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));

  xnn_subgraph_t subgraph = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_create_subgraph(/*external_value_ids=*/2, /*flags=*/0, &subgraph));
  std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);

  input_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_tensor_value(
                          subgraph, xnn_datatype_fp32, dims.size(), dims.data(), nullptr, 0,
                          /*flags=*/XNN_VALUE_FLAG_EXTERNAL_INPUT, &input_id));
  ASSERT_NE(input_id, XNN_INVALID_NODE_ID);

  output_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_tensor_value(
                          subgraph, xnn_datatype_fp32, dims.size(), dims.data(), nullptr, 1,
                          /*flags=*/XNN_VALUE_FLAG_EXTERNAL_OUTPUT, &output_id));
  ASSERT_NE(output_id, XNN_INVALID_NODE_ID);

  ASSERT_EQ(xnn_status_success, xnn_define_sigmoid(subgraph, input_id, output_id, /*flags=*/0));

  ASSERT_EQ(subgraph->num_nodes, 1);
  const struct xnn_node* node = &subgraph->nodes[0];
  ASSERT_EQ(node->type, xnn_node_type_sigmoid);
  ASSERT_EQ(node->compute_type, xnn_compute_type_fp32);
  ASSERT_EQ(node->num_inputs, 1);
  ASSERT_EQ(node->inputs[0], input_id);
  ASSERT_EQ(node->num_outputs, 1);
  ASSERT_EQ(node->outputs[0], output_id);
  ASSERT_EQ(node->flags, 0);
}

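// Runs the same QS8 Sigmoid computation through the operator API and the subgraph API
// and verifies that both produce identical outputs.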
TEST_F(SigmoidTestQS8, matches_operator_api)
{
  const int32_t input_zero_point = i8dist(rng);
  const float input_scale = scale_dist(rng);
  const int32_t output_zero_point = INT8_MIN;
  const float output_scale = 0x1.0p-8f;
  std::generate(input.begin(), input.end(), [&]() { return i8dist(rng); });
  std::fill(operator_output.begin(), operator_output.end(), INT8_C(0xA5));
  std::fill(subgraph_output.begin(), subgraph_output.end(), INT8_C(0xA5));

  ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));

  // Call operator API.
  xnn_operator_t op = nullptr;
  const xnn_status status = xnn_create_sigmoid_nc_qs8(
    channels, channels, channels, input_zero_point, input_scale, output_zero_point, output_scale, INT8_MIN,
    INT8_MAX, /*flags=*/0, &op);
  if (status == xnn_status_unsupported_hardware) {
    GTEST_SKIP();
  }
  ASSERT_EQ(xnn_status_success, status);
  ASSERT_NE(nullptr, op);
  std::unique_ptr<xnn_operator, decltype(&xnn_delete_operator)> auto_op(op, xnn_delete_operator);

  ASSERT_EQ(
    xnn_status_success,
    xnn_setup_sigmoid_nc_qs8(op, batch_size, input.data(), operator_output.data(), /*threadpool=*/nullptr));
  ASSERT_EQ(xnn_status_success, xnn_run_operator(op, /*threadpool=*/nullptr));

  // Call subgraph API.
  xnn_subgraph_t subgraph = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_create_subgraph(/*external_value_ids=*/2, /*flags=*/0, &subgraph));
  std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);
  input_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_quantized_tensor_value(
                          subgraph, xnn_datatype_qint8, input_zero_point, input_scale, dims.size(), dims.data(),
                          nullptr, /*external_id=*/0, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_INPUT, &input_id));
  ASSERT_NE(input_id, XNN_INVALID_NODE_ID);

  output_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_quantized_tensor_value(
                          subgraph, xnn_datatype_qint8, output_zero_point, output_scale, dims.size(), dims.data(),
                          nullptr, /*external_id=*/1, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_OUTPUT, &output_id));
  ASSERT_NE(output_id, XNN_INVALID_NODE_ID);

  ASSERT_EQ(xnn_status_success, xnn_define_sigmoid(subgraph, input_id, output_id, /*flags=*/0));

  xnn_runtime_t runtime = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_create_runtime_v3(subgraph, nullptr, nullptr, /*flags=*/0, &runtime));
  ASSERT_NE(nullptr, runtime);
  std::unique_ptr<xnn_runtime, decltype(&xnn_delete_runtime)> auto_runtime(runtime, xnn_delete_runtime);

  std::array<xnn_external_value, 2> external = {
    xnn_external_value{input_id, input.data()}, xnn_external_value{output_id, subgraph_output.data()}};
  ASSERT_EQ(xnn_status_success, xnn_setup_runtime(runtime, external.size(), external.data()));
  ASSERT_EQ(xnn_status_success, xnn_invoke_runtime(runtime));

  ASSERT_EQ(subgraph_output, operator_output);
}

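// QU8 variant of the operator-vs-subgraph comparison above.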
TEST_F(SigmoidTestQU8, matches_operator_api)
{
  const int32_t input_zero_point = u8dist(rng);
  const float input_scale = scale_dist(rng);
  const int32_t output_zero_point = 0;
  const float output_scale = 0x1.0p-8f;
  std::generate(input.begin(), input.end(), [&]() { return u8dist(rng); });
  std::fill(operator_output.begin(), operator_output.end(), UINT8_C(0xA5));
  std::fill(subgraph_output.begin(), subgraph_output.end(), UINT8_C(0xA5));

  ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));

  // Call operator API.
  xnn_operator_t op = nullptr;
  const xnn_status status = xnn_create_sigmoid_nc_qu8(
    channels, channels, channels, input_zero_point, input_scale, output_zero_point, output_scale, 0,
    UINT8_MAX, /*flags=*/0, &op);
  if (status == xnn_status_unsupported_hardware) {
    GTEST_SKIP();
  }
  ASSERT_EQ(xnn_status_success, status);
  ASSERT_NE(nullptr, op);
  std::unique_ptr<xnn_operator, decltype(&xnn_delete_operator)> auto_op(op, xnn_delete_operator);

  ASSERT_EQ(
    xnn_status_success,
    xnn_setup_sigmoid_nc_qu8(op, batch_size, input.data(), operator_output.data(), /*threadpool=*/nullptr));
  ASSERT_EQ(xnn_status_success, xnn_run_operator(op, /*threadpool=*/nullptr));

  // Call subgraph API.
  xnn_subgraph_t subgraph = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_create_subgraph(/*external_value_ids=*/2, /*flags=*/0, &subgraph));
  std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);
  input_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_quantized_tensor_value(
                          subgraph, xnn_datatype_quint8, input_zero_point, input_scale, dims.size(), dims.data(),
                          nullptr, /*external_id=*/0, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_INPUT, &input_id));
  ASSERT_NE(input_id, XNN_INVALID_NODE_ID);

  output_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_quantized_tensor_value(
                          subgraph, xnn_datatype_quint8, output_zero_point, output_scale, dims.size(), dims.data(),
                          nullptr, /*external_id=*/1, /*flags=*/XNN_VALUE_FLAG_EXTERNAL_OUTPUT, &output_id));
  ASSERT_NE(output_id, XNN_INVALID_NODE_ID);

  ASSERT_EQ(xnn_status_success, xnn_define_sigmoid(subgraph, input_id, output_id, /*flags=*/0));

  xnn_runtime_t runtime = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_create_runtime_v3(subgraph, nullptr, nullptr, /*flags=*/0, &runtime));
  ASSERT_NE(nullptr, runtime);
  std::unique_ptr<xnn_runtime, decltype(&xnn_delete_runtime)> auto_runtime(runtime, xnn_delete_runtime);

  std::array<xnn_external_value, 2> external = {
    xnn_external_value{input_id, input.data()}, xnn_external_value{output_id, subgraph_output.data()}};
  ASSERT_EQ(xnn_status_success, xnn_setup_runtime(runtime, external.size(), external.data()));
  ASSERT_EQ(xnn_status_success, xnn_invoke_runtime(runtime));

  ASSERT_EQ(subgraph_output, operator_output);
}

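// F32 variant of the operator-vs-subgraph comparison above.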
TEST_F(SigmoidTestF32, matches_operator_api)
{
  std::uniform_real_distribution<float> f32dist(-25.0f, 25.0f);
  std::generate(input.begin(), input.end(), [&]() { return f32dist(rng); });

  ASSERT_EQ(xnn_status_success, xnn_initialize(/*allocator=*/nullptr));

  // Call operator API.
  xnn_operator_t op = nullptr;
  const xnn_status status = xnn_create_sigmoid_nc_f32(channels, channels, channels, /*flags=*/0, &op);
  if (status == xnn_status_unsupported_hardware) {
    GTEST_SKIP();
  }

  ASSERT_EQ(xnn_status_success, status);
  ASSERT_NE(nullptr, op);
  std::unique_ptr<xnn_operator, decltype(&xnn_delete_operator)> auto_op(op, xnn_delete_operator);

  ASSERT_EQ(
    xnn_status_success,
    xnn_setup_sigmoid_nc_f32(op, batch_size, input.data(), operator_output.data(), /*threadpool=*/nullptr));

  ASSERT_EQ(xnn_status_success, xnn_run_operator(op, /*threadpool=*/nullptr));

  // Call subgraph API.
  xnn_subgraph_t subgraph = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_create_subgraph(/*external_value_ids=*/2, /*flags=*/0, &subgraph));
  std::unique_ptr<xnn_subgraph, decltype(&xnn_delete_subgraph)> auto_subgraph(subgraph, xnn_delete_subgraph);
  input_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_tensor_value(
                          subgraph, xnn_datatype_fp32, dims.size(), dims.data(), nullptr, /*external_id=*/0,
                          /*flags=*/XNN_VALUE_FLAG_EXTERNAL_INPUT, &input_id));
  ASSERT_NE(input_id, XNN_INVALID_NODE_ID);

  output_id = XNN_INVALID_NODE_ID;
  ASSERT_EQ(
    xnn_status_success, xnn_define_tensor_value(
                          subgraph, xnn_datatype_fp32, dims.size(), dims.data(), nullptr, /*external_id=*/1,
                          /*flags=*/XNN_VALUE_FLAG_EXTERNAL_OUTPUT, &output_id));
  ASSERT_NE(output_id, XNN_INVALID_NODE_ID);

  xnn_runtime_t runtime = nullptr;
  ASSERT_EQ(xnn_status_success, xnn_define_sigmoid(subgraph, input_id, output_id, /*flags=*/0));
  ASSERT_EQ(xnn_status_success, xnn_create_runtime_v3(subgraph, nullptr, nullptr, /*flags=*/0, &runtime));
  ASSERT_NE(nullptr, runtime);
  std::unique_ptr<xnn_runtime, decltype(&xnn_delete_runtime)> auto_runtime(runtime, xnn_delete_runtime);
  std::array<xnn_external_value, 2> external = {
    xnn_external_value{input_id, input.data()}, xnn_external_value{output_id, subgraph_output.data()}};
  ASSERT_EQ(xnn_status_success, xnn_setup_runtime(runtime, external.size(), external.data()));
  ASSERT_EQ(xnn_status_success, xnn_invoke_runtime(runtime));

  ASSERT_EQ(subgraph_output, operator_output);
}