Home
last modified time | relevance | path

Searched refs: cn_stride (Results 1 – 25 of 901) sorted by relevance

12345678910>>...37

/external/XNNPACK/test/
Dgemm-microkernel-tester.h140 …return this->cm_stride_ == 0 ? cn_stride() * ((n() - 1) / nr()) + (n() - 1) % nr() + 1 : this->cm_… in cm_stride()
143 inline GemmMicrokernelTester& cn_stride(size_t cn_stride) { in cn_stride() function
144 this->cn_stride_ = cn_stride; in cn_stride()
148 inline size_t cn_stride() const { in cn_stride() function
227 …std::vector<uint8_t> c((mr() - 1) * cm_stride() + ((n() - 1) / nr()) * cn_stride() + (n() - 1) % n…
285 c.data(), cm_stride() * sizeof(uint8_t), cn_stride() * sizeof(uint8_t),
296 … ASSERT_LE(uint32_t(c[i * cm_stride() + (j / nr()) * cn_stride() + j % nr()]), uint32_t(qmax()));
297 … ASSERT_GE(uint32_t(c[i * cm_stride() + (j / nr()) * cn_stride() + j % nr()]), uint32_t(qmin()));
298 …ASSERT_EQ(uint32_t(c[i * cm_stride() + (j / nr()) * cn_stride() + j % nr()]), uint32_t(c_ref[i * n…
301 …<< "), optimized = " << (uint32_t) c[i * cm_stride() + (j / nr()) * cn_stride() + j % nr()] << ", …
[all …]
/external/XNNPACK/src/f32-gemm/gen-inc/
D8x16inc-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast() argument
145 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
147 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
149 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
151 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
153 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
155 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
157 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
159 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
D7x16inc-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast() argument
134 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
136 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
138 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
140 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
142 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
144 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
146 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
D6x16inc-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast() argument
123 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
125 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
127 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
129 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
131 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
133 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
D5x16inc-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast() argument
112 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
114 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
116 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
118 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
120 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
D8x8inc-minmax-fma3-broadcast.c26 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast() argument
152 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
154 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
156 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
158 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
160 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
162 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
164 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
166 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_8x8__fma3_broadcast()
D4x16inc-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast() argument
101 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast()
103 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast()
105 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast()
107 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast()
/external/XNNPACK/src/f32-gemm/gen/
D8x16-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast() argument
143 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
145 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
147 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
149 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
151 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
153 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
155 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
157 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
D7x16-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast() argument
132 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
134 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
136 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
138 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
140 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
142 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
144 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
D6x16-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast() argument
121 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
123 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
125 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
127 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
129 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
131 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
D5x16-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast() argument
110 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
112 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
114 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
116 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
118 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
D4x2-wasm.c25 size_t cn_stride, in xnn_f32_gemm_ukernel_4x2__wasm() argument
96 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_ukernel_4x2__wasm()
99 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_ukernel_4x2__wasm()
102 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_ukernel_4x2__wasm()
105 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_ukernel_4x2__wasm()
D4x2-scalar.c25 size_t cn_stride, in xnn_f32_gemm_ukernel_4x2__scalar() argument
96 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_ukernel_4x2__scalar()
99 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_ukernel_4x2__scalar()
102 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_ukernel_4x2__scalar()
105 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_ukernel_4x2__scalar()
D4x2-relu-wasm.c25 size_t cn_stride, in xnn_f32_gemm_relu_ukernel_4x2__wasm() argument
104 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__wasm()
107 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__wasm()
110 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__wasm()
113 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__wasm()
D4x2-relu-scalar.c25 size_t cn_stride, in xnn_f32_gemm_relu_ukernel_4x2__scalar() argument
104 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__scalar()
107 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__scalar()
110 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__scalar()
113 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_relu_ukernel_4x2__scalar()
D8x8-minmax-fma3-broadcast.c26 size_t cn_stride, in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast() argument
150 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
152 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
154 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
156 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
158 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
160 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
162 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
164 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_gemm_minmax_ukernel_8x8__fma3_broadcast()
/external/XNNPACK/src/f32-igemm/gen/
D8x16-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast() argument
179 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
181 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
183 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
185 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
187 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
189 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
191 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
193 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x16__avx512f_broadcast()
D7x16-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast() argument
165 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
167 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
169 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
171 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
173 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
175 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
177 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_minmax_ukernel_7x16__avx512f_broadcast()
D6x16-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast() argument
151 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
153 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
155 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
157 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
159 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
161 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_minmax_ukernel_6x16__avx512f_broadcast()
D5x16-minmax-avx512f-broadcast.c27 size_t cn_stride, in xnn_f32_igemm_minmax_ukernel_5x16__avx512f_broadcast() argument
137 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_igemm_minmax_ukernel_5x16__avx512f_broadcast()
139 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_minmax_ukernel_5x16__avx512f_broadcast()
141 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_minmax_ukernel_5x16__avx512f_broadcast()
143 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_minmax_ukernel_5x16__avx512f_broadcast()
145 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_minmax_ukernel_5x16__avx512f_broadcast()
D8x8-minmax-fma3-broadcast.c26 size_t cn_stride, in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast() argument
185 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
187 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
189 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
191 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
193 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
195 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
197 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
199 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_minmax_ukernel_8x8__fma3_broadcast()
D4x2-scalar.c25 size_t cn_stride, in xnn_f32_igemm_ukernel_4x2__scalar() argument
120 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_ukernel_4x2__scalar()
123 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_ukernel_4x2__scalar()
126 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_ukernel_4x2__scalar()
129 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_ukernel_4x2__scalar()
D4x2-wasm.c25 size_t cn_stride, in xnn_f32_igemm_ukernel_4x2__wasm() argument
120 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_igemm_ukernel_4x2__wasm()
123 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_igemm_ukernel_4x2__wasm()
126 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_igemm_ukernel_4x2__wasm()
129 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_igemm_ukernel_4x2__wasm()
/external/XNNPACK/src/f32-ppmm/gen/
D8x8-minmax-neon.c26 size_t cn_stride, in xnn_f32_ppmm_minmax_ukernel_8x8__neon() argument
150 c7 = (float*) ((uintptr_t) c7 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
153 c6 = (float*) ((uintptr_t) c6 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
156 c5 = (float*) ((uintptr_t) c5 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
159 c4 = (float*) ((uintptr_t) c4 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
162 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
165 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
168 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
171 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_8x8__neon()
D4x2-minmax-scalar.c24 size_t cn_stride, in xnn_f32_ppmm_minmax_ukernel_4x2__scalar() argument
114 c3 = (float*) ((uintptr_t) c3 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_4x2__scalar()
115 c2 = (float*) ((uintptr_t) c2 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_4x2__scalar()
116 c1 = (float*) ((uintptr_t) c1 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_4x2__scalar()
117 c0 = (float*) ((uintptr_t) c0 + cn_stride); in xnn_f32_ppmm_minmax_ukernel_4x2__scalar()

12345678910>>...37