Searched refs:cn_stride (Results 1 – 25 of 2801) sorted by relevance


/external/XNNPACK/test/
gemm-microkernel-tester.cc:43 …std::vector<uint8_t> c((mr() - 1) * cm_stride() + ((n() - 1) / nr()) * cn_stride() + (n() - 1) % n… in Test()
91 c.data(), cm_stride() * sizeof(uint8_t), cn_stride() * sizeof(uint8_t), in Test()
103 … ASSERT_LE(uint32_t(c[i * cm_stride() + (j / nr()) * cn_stride() + j % nr()]), uint32_t(qmax())); in Test()
104 … ASSERT_GE(uint32_t(c[i * cm_stride() + (j / nr()) * cn_stride() + j % nr()]), uint32_t(qmin())); in Test()
105 …ASSERT_EQ(uint32_t(c[i * cm_stride() + (j / nr()) * cn_stride() + j % nr()]), uint32_t(c_ref[i * n… in Test()
108 …<< "), optimized = " << (uint32_t) c[i * cm_stride() + (j / nr()) * cn_stride() + j % nr()] << ", … in Test()
133 …std::vector<uint8_t> c((mr() - 1) * cm_stride() + ((n() - 1) / nr()) * cn_stride() + (n() - 1) % n… in Test()
212 c.data(), cm_stride() * sizeof(uint8_t), cn_stride() * sizeof(uint8_t), in Test()
225 … ASSERT_LE(uint32_t(c[i * cm_stride() + (j / nr()) * cn_stride() + j % nr()]), uint32_t(qmax())); in Test()
226 … ASSERT_GE(uint32_t(c[i * cm_stride() + (j / nr()) * cn_stride() + j % nr()]), uint32_t(qmin())); in Test()
[all …]
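
A note on the index formula in the tester hits above: element (i, j) of the logical m x n output is read as c[i * cm_stride() + (j / nr()) * cn_stride() + j % nr()], so rows sit cm_stride elements apart and each nr-wide column tile starts cn_stride elements after the previous one (the tester converts these to byte strides via sizeof(uint8_t) when calling the kernel). A minimal sketch of that addressing, using a hypothetical helper name rather than the XNNPACK tester itself:

#include <stddef.h>

/* Hypothetical helper, not XNNPACK code: maps logical element (i, j) to its
 * flat index in the kernel's output buffer. Strides are in elements here. */
static size_t c_index(size_t i, size_t j, size_t nr,
                      size_t cm_stride, size_t cn_stride) {
  /* row offset + column-tile offset + position within the nr-wide tile */
  return i * cm_stride + (j / nr) * cn_stride + (j % nr);
}
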
/external/XNNPACK/src/f32-gemm/gen/
8x16-minmax-avx512f-broadcast.c:27 size_t cn_stride, in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast() argument
151 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
153 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
155 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
157 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
159 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
161 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
163 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
165 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
7x16-minmax-avx512f-broadcast.c:27 size_t cn_stride, in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast() argument
139 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
141 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
143 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
145 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
147 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
149 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
151 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
6x16-minmax-avx512f-broadcast.c:27 size_t cn_stride, in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast() argument
127 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
129 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
131 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
133 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
135 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
137 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
4x2-scalar.c:25 size_t cn_stride, in xnn_f32_gemm_ukernel_4x2__scalar() argument
96 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_ukernel_4x2__scalar()
99 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_ukernel_4x2__scalar()
102 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_ukernel_4x2__scalar()
105 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_ukernel_4x2__scalar()
5x16-minmax-avx512f-broadcast.c:27 size_t cn_stride, in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast() argument
115 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
117 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
119 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
121 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
123 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
8x8-minmax-fma3-broadcast.c:26 size_t cn_stride, in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast() argument
150 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
152 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
154 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
156 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
158 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
160 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
162 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
164 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
4x2-relu-scalar.c:25 size_t cn_stride, in xnn_f32_gemm_relu_ukernel_4x2__scalar() argument
104 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__scalar()
107 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__scalar()
110 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__scalar()
113 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__scalar()
4x2-relu-wasm.c:25 size_t cn_stride, in xnn_f32_gemm_relu_ukernel_4x2__wasm() argument
104 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__wasm()
107 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__wasm()
110 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__wasm()
113 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__wasm()
7x8-minmax-fma3-broadcast.c:26 size_t cn_stride, in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast() argument
138 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
140 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
142 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
144 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
146 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
148 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
150 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x8__fma3_broadcast()
/external/XNNPACK/src/f32-gemm/gen-inc/
8x16inc-minmax-avx512f-broadcast.c:27 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast() argument
153 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
155 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
157 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
159 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
161 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
163 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
165 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
167 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
7x16inc-minmax-avx512f-broadcast.c:27 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast() argument
141 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
143 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
145 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
147 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
149 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
151 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
153 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
6x16inc-minmax-avx512f-broadcast.c:27 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast() argument
129 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
131 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
133 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
135 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
137 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
139 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
5x16inc-minmax-avx512f-broadcast.c:27 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast() argument
117 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
119 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
121 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
123 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
125 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
8x8inc-minmax-fma3-broadcast.c:26 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast() argument
152 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
154 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
156 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
158 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
160 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
162 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
164 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
166 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
7x8inc-minmax-avx-broadcast.c:26 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast() argument
140 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
142 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
144 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
146 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
148 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
150 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
152 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__avx_broadcast()
7x8inc-minmax-fma3-broadcast.c:26 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast() argument
140 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
142 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
144 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
146 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
148 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
150 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
152 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x8__fma3_broadcast()
/external/XNNPACK/src/f32-igemm/gen/
8x16-minmax-avx512f-broadcast.c:27 size_t cn_stride, in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast() argument
187 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
189 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
191 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
193 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
195 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
197 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
199 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
201 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
7x16-minmax-avx512f-broadcast.c:27 size_t cn_stride, in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast() argument
172 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
174 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
176 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
178 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
180 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
182 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
184 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
6x16-minmax-avx512f-broadcast.c:27 size_t cn_stride, in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast() argument
157 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
159 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
161 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
163 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
165 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
167 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
8x8-minmax-fma3-broadcast.c:26 size_t cn_stride, in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast() argument
185 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
187 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
189 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
191 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
193 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
195 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
197 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
199 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
7x8-minmax-avx-broadcast.c:26 size_t cn_stride, in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast() argument
170 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
172 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
174 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
176 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
178 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
180 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
182 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__avx_broadcast()
7x8-minmax-fma3-broadcast.c:26 size_t cn_stride, in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast() argument
170 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
172 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
174 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
176 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
178 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
180 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
182 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x8__fma3_broadcast()
/external/XNNPACK/src/f32-ppmm/gen/
8x8-minmax-neon.c:26 size_t cn_stride, in xnn_f32_ppmm_minmax_ukernel_8x8__neon() argument
150 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
153 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
156 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
159 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
162 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
165 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
168 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
171 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
4x2-minmax-scalar.c:24 size_t cn_stride, in xnn_f32_ppmm_minmax_ukernel_4x2__scalar() argument
114 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_4x2__scalar()
115 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_4x2__scalar()
116 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_4x2__scalar()
117 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_4x2__scalar()
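
The GEMM, IGEMM, and PPMM hits above all share the same output-pointer idiom: after a kernel stores one mr x nr tile, each row pointer cN is advanced by cn_stride bytes (not elements) to the start of the next column tile, e.g. c0 = (float*) ((uintptr_t) c0 + cn_stride);. A minimal, self-contained sketch of that pattern, using hypothetical names rather than any actual XNNPACK kernel:

#include <stddef.h>
#include <stdint.h>

/* Hypothetical helper, not XNNPACK code: advance mr output row pointers by
 * cn_stride bytes, the same byte-offset idiom seen in every hit above. */
static void advance_row_pointers(float** c, size_t mr, size_t cn_stride) {
  for (size_t i = 0; i < mr; i++) {
    c[i] = (float*) ((uintptr_t) c[i] + cn_stride);
  }
}
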
