// Copyright 2020 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.

#include <assert.h>
#include <inttypes.h>
#include <stddef.h>
#include <stdint.h>

#include <xnnpack.h>
#include <xnnpack/log.h>
#include <xnnpack/params.h>
#include <xnnpack/subgraph.h>

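// Creates the XNNPACK Depth-to-Space operator for a subgraph node, choosing the
// NCHW-to-NHWC or plain NHWC variant based on the input value's layout, and
// records the tensor dimensions needed at setup time in the operator data.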
static enum xnn_status create_depth_to_space_operator(
  const struct xnn_node* node,
  const struct xnn_value* values,
  size_t num_values,
  struct xnn_operator_data* opdata)
{
  assert(node->compute_type == xnn_compute_type_fp32);

  assert(node->num_inputs == 1);
  const uint32_t input_id = node->inputs[0];
  assert(input_id != XNN_INVALID_VALUE_ID);
  assert(input_id < num_values);

  assert(node->num_outputs == 1);
  const uint32_t output_id = node->outputs[0];
  assert(output_id != XNN_INVALID_VALUE_ID);
  assert(output_id < num_values);

  const size_t input_channel_dim = values[input_id].shape.dim[3];
  const size_t output_channel_dim = values[output_id].shape.dim[3];

  enum xnn_status status;
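  // Select the operator variant from the input layout: an NCHW input is
  // converted to NHWC as part of the Depth-to-Space operation, otherwise the
  // plain NHWC variant is used.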
  if (values[input_id].layout == xnn_layout_type_nchw) {
    assert(values[output_id].layout == xnn_layout_type_nhwc);
    status = xnn_create_depth_to_space_nchw2nhwc_x32(
        output_channel_dim /* output channels */,
        input_channel_dim /* input stride */,
        output_channel_dim /* output stride */,
        node->params.depth_to_space.block_size,
        node->flags,
        &opdata->operator_object);
  } else {
    assert(values[input_id].layout == xnn_layout_type_nhwc);
    assert(values[output_id].layout == xnn_layout_type_nhwc);
    status = xnn_create_depth_to_space_nhwc_x32(
        output_channel_dim /* output channels */,
        input_channel_dim /* input stride */,
        output_channel_dim /* output stride */,
        node->params.depth_to_space.block_size,
        node->flags,
        &opdata->operator_object);
  }
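  // On success, record the shapes and value IDs that the setup step needs.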
  if (status == xnn_status_success) {
    opdata->batch_size = values[input_id].shape.dim[0];
    opdata->input_height = values[input_id].shape.dim[1];
    opdata->input_width = values[input_id].shape.dim[2];
    opdata->output_height = values[output_id].shape.dim[1];
    opdata->output_width = values[output_id].shape.dim[2];
    opdata->inputs[0] = input_id;
    opdata->outputs[0] = output_id;
  }
  return status;
}

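// Attaches the input and output blob data to the previously created
// Depth-to-Space operator and sets it up for execution on the given threadpool.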
static enum xnn_status setup_depth_to_space_operator(
  const struct xnn_operator_data* opdata,
  const struct xnn_blob* blobs,
  size_t num_blobs,
  pthreadpool_t threadpool)
{
  const uint32_t input_id = opdata->inputs[0];
  assert(input_id != XNN_INVALID_VALUE_ID);
  assert(input_id < num_blobs);

  const uint32_t output_id = opdata->outputs[0];
  assert(output_id != XNN_INVALID_VALUE_ID);
  assert(output_id < num_blobs);

  const struct xnn_blob* input_blob = blobs + input_id;
  const void* input_data = input_blob->data;
  assert(input_data != NULL);

  const struct xnn_blob* output_blob = blobs + output_id;
  void* output_data = output_blob->data;
  assert(output_data != NULL);

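  // Dispatch to the setup function that matches the created operator variant.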
  switch (opdata->operator_object->type) {
    case xnn_operator_type_depth_to_space_nchw2nhwc_x32:
      return xnn_setup_depth_to_space_nchw2nhwc_x32(
          opdata->operator_object,
          opdata->batch_size,
          opdata->input_height,
          opdata->input_width,
          input_data,
          output_data,
          threadpool);
    case xnn_operator_type_depth_to_space_nhwc_x32:
      return xnn_setup_depth_to_space_nhwc_x32(
          opdata->operator_object,
          opdata->batch_size,
          opdata->input_height,
          opdata->input_width,
          input_data,
          output_data,
          threadpool);
    default:
      XNN_UNREACHABLE;
  }
}

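// Validates the arguments and appends a Depth-to-Space node to the subgraph.
// The operator itself is created and set up later through the node's create
// and setup callbacks.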
enum xnn_status xnn_define_depth_to_space(
  xnn_subgraph_t subgraph,
  uint32_t input_id,
  uint32_t output_id,
  uint32_t block_size,
  uint32_t flags)
{
  if ((xnn_params.init_flags & XNN_INIT_FLAG_XNNPACK) == 0) {
    xnn_log_error("failed to define %s operator: XNNPACK is not initialized",
      xnn_node_type_to_string(xnn_node_type_depth_to_space));
    return xnn_status_uninitialized;
  }

  if (input_id >= subgraph->num_values) {
    xnn_log_error(
      "failed to define %s operator with input ID #%" PRIu32 ": invalid Value ID",
      xnn_node_type_to_string(xnn_node_type_depth_to_space), input_id);
    return xnn_status_invalid_parameter;
  }

  const struct xnn_value* input_value = &subgraph->values[input_id];
  if (input_value->type != xnn_value_type_dense_tensor) {
    xnn_log_error(
      "failed to define %s operator with input ID #%" PRIu32 ": unsupported Value type %d (expected dense tensor)",
      xnn_node_type_to_string(xnn_node_type_depth_to_space), input_id, input_value->type);
    return xnn_status_invalid_parameter;
  }

  switch (input_value->datatype) {
    case xnn_datatype_fp32:
      break;
    default:
      xnn_log_error(
        "failed to define %s operator with input ID #%" PRIu32 ": unsupported Value datatype %s (%d)",
        xnn_node_type_to_string(xnn_node_type_depth_to_space), input_id,
        xnn_datatype_to_string(input_value->datatype), input_value->datatype);
      return xnn_status_invalid_parameter;
  }

  if (output_id >= subgraph->num_values) {
    xnn_log_error(
      "failed to define %s operator with output ID #%" PRIu32 ": invalid Value ID",
      xnn_node_type_to_string(xnn_node_type_depth_to_space), output_id);
    return xnn_status_invalid_parameter;
  }

  const struct xnn_value* output_value = &subgraph->values[output_id];
  if (output_value->type != xnn_value_type_dense_tensor) {
    xnn_log_error(
      "failed to define %s operator with output ID #%" PRIu32 ": unsupported Value type %d (expected dense tensor)",
      xnn_node_type_to_string(xnn_node_type_depth_to_space), output_id, output_value->type);
    return xnn_status_invalid_parameter;
  }

  switch (output_value->datatype) {
    case xnn_datatype_fp32:
      break;
    default:
      xnn_log_error(
        "failed to define %s operator with output ID #%" PRIu32 ": unsupported Value datatype %s (%d)",
        xnn_node_type_to_string(xnn_node_type_depth_to_space), output_id,
        xnn_datatype_to_string(output_value->datatype), output_value->datatype);
      return xnn_status_invalid_parameter;
  }

  if (block_size < 2) {
    xnn_log_error(
      "failed to define %s operator with block size #%" PRIu32 ": invalid block_size",
      xnn_node_type_to_string(xnn_node_type_depth_to_space), block_size);
    return xnn_status_invalid_parameter;
  }

  struct xnn_node* node = xnn_subgraph_new_node(subgraph);
  if (node == NULL) {
    return xnn_status_out_of_memory;
  }

  node->type = xnn_node_type_depth_to_space;
  node->compute_type = xnn_compute_type_fp32;
  node->num_inputs = 1;
  node->inputs[0] = input_id;
  node->num_outputs = 1;
  node->outputs[0] = output_id;
  node->params.depth_to_space.block_size = block_size;
  node->flags = flags;

  node->create = create_depth_to_space_operator;
  node->setup = setup_depth_to_space_operator;

  return xnn_status_success;
}