Home
last modified time | relevance | path

Searched refs: input_x (Results 1 – 25 of 84) sorted by relevance

1234

/external/toybox/kconfig/lxdialog/
Dinputbox.c48 int input_x = 0, scroll = 0, key = 0, button = -1; in dialog_inputbox() local
100 input_x = strlen(instr); in dialog_inputbox()
102 if (input_x >= box_width) { in dialog_inputbox()
103 scroll = input_x - box_width + 1; in dialog_inputbox()
104 input_x = box_width - 1; in dialog_inputbox()
111 wmove(dialog, box_y, box_x + input_x); in dialog_inputbox()
130 if (input_x || scroll) { in dialog_inputbox()
132 if (!input_x) { in dialog_inputbox()
137 instr[scroll + input_x + i] ? in dialog_inputbox()
138 instr[scroll + input_x + i] : ' '); in dialog_inputbox()
[all …]
/external/tensorflow/tensorflow/lite/kernels/
Dselect.cc67 const TfLiteTensor* input_x; in SelectPrepare() local
69 GetInputSafe(context, node, kInputTensorX, &input_x)); in SelectPrepare()
79 TF_LITE_ENSURE_TYPES_EQ(context, input_x->type, input_y->type); in SelectPrepare()
80 output->type = input_x->type; in SelectPrepare()
82 bool same_shape = HaveSameShapes(input_condition, input_x) && in SelectPrepare()
83 HaveSameShapes(input_x, input_y); in SelectPrepare()
91 SizeOfDimension(input_condition, 0) == SizeOfDimension(input_x, 0); in SelectPrepare()
96 output_size = TfLiteIntArrayCopy(input_x->dims); in SelectPrepare()
99 TF_LITE_ENSURE(context, HaveSameShapes(input_x, input_y)); in SelectPrepare()
104 context, input_condition, input_x, in SelectPrepare()
[all …]
/external/XNNPACK/src/
Dindirection.c56 … const size_t input_x = output_x * stride_width + kernel_x * dilation_width - input_padding_left; in xnn_indirection_init_conv2d() local
59 if (input_x < input_width) { in xnn_indirection_init_conv2d()
61 ((uintptr_t) input + (input_y * input_width + input_x) * input_pixel_stride); in xnn_indirection_init_conv2d()
119 const size_t input_x = fxdiv_quotient_size_t(x, stride_width_divisor); in xnn_indirection_init_deconv2d() local
122 if (input_y * stride_height == y && input_y < input_height && input_x * stride_width == x && input_x < input_width) { in xnn_indirection_init_deconv2d()
123 indirection_buffer[index] = (const void*) ((uintptr_t) input + (input_y * input_width + input_x) * input_pixel_stride); in xnn_indirection_init_deconv2d()
181 const size_t input_x = x / stride_width; in xnn_indirection_init_subconv2d() local
183 if (input_y < input_height && input_x < input_width) { in xnn_indirection_init_subconv2d()
185 … (const void*) ((uintptr_t) input + (input_y * input_width + input_x) * input_pixel_stride); in xnn_indirection_init_subconv2d()
227 … const size_t input_x = output_x * stride_width + kernel_x * dilation_width - input_padding_left; in xnn_indirection_init_dwconv2d() local
[all …]
/external/XNNPACK/src/qs8-vaddc/gen/
Dminmax-sse41-mul32-ld32-x32.c20 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x32() argument
37 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x32()
38 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x32()
39 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x32()
40 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x32()
41 const __m128i vxGHIJ = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 16)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x32()
42 const __m128i vxKLMN = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 20)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x32()
43 const __m128i vxOPQR = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 24)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x32()
44 const __m128i vxSTUV = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 28)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x32()
45 input_x += 32; in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x32()
[all …]
Dminmax-xop-mul32-ld32-x32.c25 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x32() argument
42 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x32()
43 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x32()
44 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x32()
45 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x32()
46 const __m128i vxGHIJ = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 16)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x32()
47 const __m128i vxKLMN = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 20)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x32()
48 const __m128i vxOPQR = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 24)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x32()
49 const __m128i vxSTUV = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 28)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x32()
50 input_x += 32; in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x32()
[all …]
Dminmax-xop-mul32-ld32-x24.c25 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x24() argument
42 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x24()
43 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x24()
44 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x24()
45 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x24()
46 const __m128i vxGHIJ = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 16)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x24()
47 const __m128i vxKLMN = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 20)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x24()
48 input_x += 24; in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x24()
93 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x24()
94 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x24()
[all …]
Dminmax-neon-ld64-x32.c22 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x32() argument
40 const int8x8_t vx01234567 = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x32()
41 const int8x8_t vx89ABCDEF = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x32()
42 const int8x8_t vxGHIJKLMN = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x32()
43 const int8x8_t vxOPQRSTUV = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x32()
96 const int8x8_t vx01234567 = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x32()
Dminmax-sse41-mul32-ld32-x24.c20 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x24() argument
37 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x24()
38 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x24()
39 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x24()
40 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x24()
41 const __m128i vxGHIJ = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 16)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x24()
42 const __m128i vxKLMN = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 20)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x24()
43 input_x += 24; in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x24()
88 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x24()
89 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x24()
[all …]
Dminmax-sse41-mul32-ld32-x16.c20 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x16() argument
37 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x16()
38 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x16()
39 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x16()
40 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x16()
41 input_x += 16; in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x16()
75 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x16()
76 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x16()
77 input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x16()
Dminmax-xop-mul32-ld32-x16.c25 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x16() argument
42 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x16()
43 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x16()
44 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x16()
45 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x16()
46 input_x += 16; in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x16()
80 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x16()
81 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x16()
82 input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__xop_mul32_ld32_x16()
Dminmax-neon-ld64-x24.c22 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x24() argument
40 const int8x8_t vx01234567 = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x24()
41 const int8x8_t vx89ABCDEF = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x24()
42 const int8x8_t vxGHIJKLMN = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x24()
87 const int8x8_t vx01234567 = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x24()
Dminmax-avx2-mul32-ld64-x32.c20 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x32() argument
37 const __m256i vx01234567 = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) input_x)); in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x32()
38 … const __m256i vx89ABCDEF = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) (input_x + 8))); in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x32()
39 … const __m256i vxGHIJKLMN = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) (input_x + 16))); in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x32()
40 … const __m256i vxOPQRSTUV = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) (input_x + 24))); in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x32()
41 input_x += 32; in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x32()
73 const __m256i vx01234567 = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) input_x)); in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x32()
74 input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x32()
Dminmax-avx2-mul32-ld64-x24.c20 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x24() argument
37 const __m256i vx01234567 = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) input_x)); in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x24()
38 … const __m256i vx89ABCDEF = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) (input_x + 8))); in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x24()
39 … const __m256i vxGHIJKLMN = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) (input_x + 16))); in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x24()
40 input_x += 24; in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x24()
69 const __m256i vx01234567 = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) input_x)); in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x24()
70 input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__avx2_mul32_ld64_x24()
Dminmax-neon-ld64-x16.c22 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x16() argument
40 const int8x8_t vx01234567 = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x16()
41 const int8x8_t vx89ABCDEF = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x16()
74 const int8x8_t vx01234567 = vld1_s8(input_x); input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__neon_ld64_x16()
Dminmax-sse41-mul32-ld32-x8.c20 const int8_t* input_x, in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x8() argument
37 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x8()
38 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x8()
39 input_x += 8; in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x8()
64 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x8()
65 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vaddc_minmax_ukernel__sse41_mul32_ld32_x8()
/external/XNNPACK/src/qs8-vadd/gen/
Dminmax-sse41-mul32-ld32-x32.c20 const int8_t* input_x, in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x32() argument
36 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x32()
38 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x32()
40 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x32()
42 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x32()
44 const __m128i vxGHIJ = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 16)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x32()
46 const __m128i vxKLMN = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 20)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x32()
48 const __m128i vxOPQR = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 24)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x32()
50 const __m128i vxSTUV = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 28)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x32()
52 input_x += 32; in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x32()
[all …]
Dminmax-xop-mul32-ld32-x32.c25 const int8_t* input_x, in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x32() argument
41 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x32()
43 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x32()
45 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x32()
47 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x32()
49 const __m128i vxGHIJ = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 16)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x32()
51 const __m128i vxKLMN = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 20)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x32()
53 const __m128i vxOPQR = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 24)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x32()
55 const __m128i vxSTUV = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 28)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x32()
57 input_x += 32; in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x32()
[all …]
Dminmax-sse41-mul32-ld32-x24.c20 const int8_t* input_x, in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x24() argument
36 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x24()
38 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x24()
40 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x24()
42 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x24()
44 const __m128i vxGHIJ = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 16)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x24()
46 const __m128i vxKLMN = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 20)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x24()
48 input_x += 24; in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x24()
100 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x24()
102 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x24()
[all …]
Dminmax-xop-mul32-ld32-x24.c25 const int8_t* input_x, in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x24() argument
41 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x24()
43 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x24()
45 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x24()
47 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x24()
49 const __m128i vxGHIJ = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 16)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x24()
51 const __m128i vxKLMN = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 20)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x24()
53 input_x += 24; in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x24()
105 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x24()
107 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x24()
[all …]
Dminmax-neon-ld64-x32.c19 const int8_t* input_x, in xnn_qs8_vadd_minmax_ukernel__neon_ld64_x32() argument
35 const int8x8_t vx01234567 = vld1_s8(input_x); input_x += 8; in xnn_qs8_vadd_minmax_ukernel__neon_ld64_x32()
37 const int8x8_t vx89ABCDEF = vld1_s8(input_x); input_x += 8; in xnn_qs8_vadd_minmax_ukernel__neon_ld64_x32()
39 const int8x8_t vxGHIJKLMN = vld1_s8(input_x); input_x += 8; in xnn_qs8_vadd_minmax_ukernel__neon_ld64_x32()
41 const int8x8_t vxOPQRSTUV = vld1_s8(input_x); input_x += 8; in xnn_qs8_vadd_minmax_ukernel__neon_ld64_x32()
108 const int8x8_t vx01234567 = vld1_s8(input_x); input_x += 8; in xnn_qs8_vadd_minmax_ukernel__neon_ld64_x32()
Dminmax-xop-mul32-ld32-x16.c25 const int8_t* input_x, in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x16() argument
41 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x16()
43 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x16()
45 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x16()
47 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x16()
49 input_x += 16; in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x16()
88 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x16()
90 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x16()
92 input_x += 8; in xnn_qs8_vadd_minmax_ukernel__xop_mul32_ld32_x16()
Dminmax-sse41-mul32-ld32-x16.c20 const int8_t* input_x, in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x16() argument
36 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x16()
38 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x16()
40 const __m128i vx89AB = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 8)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x16()
42 const __m128i vxCDEF = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 12)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x16()
44 input_x += 16; in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x16()
83 const __m128i vx0123 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x16()
85 const __m128i vx4567 = _mm_cvtepi8_epi32(_mm_loadu_si32(input_x + 4)); in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x16()
87 input_x += 8; in xnn_qs8_vadd_minmax_ukernel__sse41_mul32_ld32_x16()
Dminmax-neon-ld64-x24.c19 const int8_t* input_x, in xnn_qs8_vadd_minmax_ukernel__neon_ld64_x24() argument
35 const int8x8_t vx01234567 = vld1_s8(input_x); input_x += 8; in xnn_qs8_vadd_minmax_ukernel__neon_ld64_x24()
37 const int8x8_t vx89ABCDEF = vld1_s8(input_x); input_x += 8; in xnn_qs8_vadd_minmax_ukernel__neon_ld64_x24()
39 const int8x8_t vxGHIJKLMN = vld1_s8(input_x); input_x += 8; in xnn_qs8_vadd_minmax_ukernel__neon_ld64_x24()
95 const int8x8_t vx01234567 = vld1_s8(input_x); input_x += 8; in xnn_qs8_vadd_minmax_ukernel__neon_ld64_x24()
Dminmax-avx2-mul32-ld64-x32.c20 const int8_t* input_x, in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x32() argument
36 const __m256i vx01234567 = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) input_x)); in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x32()
38 … const __m256i vx89ABCDEF = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) (input_x + 8))); in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x32()
40 … const __m256i vxGHIJKLMN = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) (input_x + 16))); in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x32()
42 … const __m256i vxOPQRSTUV = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) (input_x + 24))); in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x32()
44 input_x += 32; in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x32()
82 const __m256i vx01234567 = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) input_x)); in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x32()
84 input_x += 8; in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x32()
Dminmax-avx2-mul32-ld64-x24.c20 const int8_t* input_x, in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x24() argument
36 const __m256i vx01234567 = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) input_x)); in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x24()
38 … const __m256i vx89ABCDEF = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) (input_x + 8))); in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x24()
40 … const __m256i vxGHIJKLMN = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) (input_x + 16))); in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x24()
42 input_x += 24; in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x24()
76 const __m256i vx01234567 = _mm256_cvtepi8_epi32(_mm_loadl_epi64((const __m128i*) input_x)); in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x24()
78 input_x += 8; in xnn_qs8_vadd_minmax_ukernel__avx2_mul32_ld64_x24()

1234