/external/libgav1/libgav1/src/dsp/arm/ |
D | warp_neon.cc |
    77  src_row_window = vget_low_s8(vextq_s8(src_row_centered, src_row_centered, 1));  in HorizontalFilter()
    80  src_row_window = vget_low_s8(vextq_s8(src_row_centered, src_row_centered, 2));  in HorizontalFilter()
    83  src_row_window = vget_low_s8(vextq_s8(src_row_centered, src_row_centered, 3));  in HorizontalFilter()
    86  src_row_window = vget_low_s8(vextq_s8(src_row_centered, src_row_centered, 4));  in HorizontalFilter()
    89  src_row_window = vget_low_s8(vextq_s8(src_row_centered, src_row_centered, 5));  in HorizontalFilter()
    92  src_row_window = vget_low_s8(vextq_s8(src_row_centered, src_row_centered, 6));  in HorizontalFilter()
    95  src_row_window = vget_low_s8(vextq_s8(src_row_centered, src_row_centered, 7));  in HorizontalFilter()
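In warp_neon.cc both vextq_s8 operands are the same centered source row, so vget_low_s8(vextq_s8(row, row, k)) yields bytes k..k+7 of that row: each hit above selects the next 8-byte window for one tap of the horizontal filter. A minimal sketch of that sliding-window idiom (the helper name window_at is ours, not libgav1's; the lane count must be a compile-time constant, hence the switch):

#include <arm_neon.h>

// Returns bytes k..k+7 of a 16-byte row by rotating the row left by k lanes
// and taking the low half. k = 0..7 covers every window the filter needs.
static inline int8x8_t window_at(const int8x16_t row, const int k) {
  switch (k) {
    case 1: return vget_low_s8(vextq_s8(row, row, 1));
    case 2: return vget_low_s8(vextq_s8(row, row, 2));
    case 3: return vget_low_s8(vextq_s8(row, row, 3));
    case 4: return vget_low_s8(vextq_s8(row, row, 4));
    case 5: return vget_low_s8(vextq_s8(row, row, 5));
    case 6: return vget_low_s8(vextq_s8(row, row, 6));
    case 7: return vget_low_s8(vextq_s8(row, row, 7));
    default: return vget_low_s8(row);  // k == 0: the unshifted window
  }
}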
|
/external/XNNPACK/src/qs8-gemm/gen/ |
D | 8x8c4-minmax-neondot.c |
    336  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_8x8c4__neondot()
    337  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_8x8c4__neondot()
    338  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_8x8c4__neondot()
    339  … vout6x01234567_7x01234567 = vextq_s8(vout6x01234567_7x01234567, vout6x01234567_7x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_8x8c4__neondot()
    350  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_8x8c4__neondot()
    351  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_8x8c4__neondot()
    352  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_8x8c4__neondot()
    353  … vout6x01234567_7x01234567 = vextq_s8(vout6x01234567_7x01234567, vout6x01234567_7x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_8x8c4__neondot()
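The same remainder-store idiom recurs in every qs8-gemm and qs8-igemm entry below: each int8x16_t packs two 8-column output rows, and when fewer than 8 columns remain the kernel stores 4, then 2, then 1 byte per row, using vextq_s8(v, v, 4) and vextq_s8(v, v, 2) to rotate the not-yet-stored lanes down into the store positions. A condensed sketch of that tail handling, assuming the two-rows-per-vector layout of vout0x01234567_1x01234567 (the helper name store_tail_2rows and its simplified signature are illustrative, not XNNPACK API; the real kernels also mark these narrow stores as potentially unaligned):

#include <arm_neon.h>
#include <stddef.h>
#include <stdint.h>

// vout01 holds output row 0 in lanes 0..7 and row 1 in lanes 8..15.
// Stores the first nc (0 < nc < 8) columns of each row to c0 and c1.
static void store_tail_2rows(int8_t* c0, int8_t* c1, int8x16_t vout01, size_t nc) {
  if (nc & 4) {
    vst1q_lane_u32((uint32_t*) c0, vreinterpretq_u32_s8(vout01), 0); c0 += 4;
    vst1q_lane_u32((uint32_t*) c1, vreinterpretq_u32_s8(vout01), 2); c1 += 4;
    vout01 = vextq_s8(vout01, vout01, 4);  // rotate the next 4 lanes of each row into place
  }
  if (nc & 2) {
    vst1q_lane_u16((uint16_t*) c0, vreinterpretq_u16_s8(vout01), 0); c0 += 2;
    vst1q_lane_u16((uint16_t*) c1, vreinterpretq_u16_s8(vout01), 4); c1 += 2;
    vout01 = vextq_s8(vout01, vout01, 2);  // rotate the next 2 lanes of each row into place
  }
  if (nc & 1) {
    vst1q_lane_s8(c0, vout01, 0);
    vst1q_lane_s8(c1, vout01, 8);
  }
}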
|
D | 6x8c4-minmax-neondot.c |
    276  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_6x8c4__neondot()
    277  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_6x8c4__neondot()
    278  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_6x8c4__neondot()
    287  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_6x8c4__neondot()
    288  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_6x8c4__neondot()
    289  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_6x8c4__neondot()
|
D | 4x8c4-minmax-neondot.c |
    216  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_4x8c4__neondot()
    217  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_4x8c4__neondot()
    224  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_4x8c4__neondot()
    225  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_4x8c4__neondot()
|
D | 8x16c4-minmax-neondot.c |
    504  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_8x16c4__neondot()
    505  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_8x16c4__neondot()
    506  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_8x16c4__neondot()
    507  … vout6x01234567_7x01234567 = vextq_s8(vout6x01234567_7x01234567, vout6x01234567_7x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_8x16c4__neondot()
    518  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_8x16c4__neondot()
    519  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_8x16c4__neondot()
    520  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_8x16c4__neondot()
    521  … vout6x01234567_7x01234567 = vextq_s8(vout6x01234567_7x01234567, vout6x01234567_7x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_8x16c4__neondot()
|
D | 6x16c4-minmax-neondot.c |
    404  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_6x16c4__neondot()
    405  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_6x16c4__neondot()
    406  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_6x16c4__neondot()
    415  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_6x16c4__neondot()
    416  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_6x16c4__neondot()
    417  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_6x16c4__neondot()
|
D | 4x16c4-minmax-neondot.c |
    304  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_4x16c4__neondot()
    305  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_4x16c4__neondot()
    312  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_4x16c4__neondot()
    313  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_4x16c4__neondot()
|
D | 4x8c2-minmax-neon-mull-padal-dup.c |
    307  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_4x8c2__neon_mull_padal_dup()
    308  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_4x8c2__neon_mull_padal_dup()
    315  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_4x8c2__neon_mull_padal_dup()
    316  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_4x8c2__neon_mull_padal_dup()
|
D | 4x8c8-minmax-neon-mull-padal.c |
    343  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_4x8c8__neon_mull_padal()
    344  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_4x8c8__neon_mull_padal()
    351  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_4x8c8__neon_mull_padal()
    352  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_4x8c8__neon_mull_padal()
|
D | 4x8-minmax-neon-mlal-lane.c |
    360  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_4x8__neon_mlal_lane()
    361  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_gemm_minmax_ukernel_4x8__neon_mlal_lane()
    368  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_4x8__neon_mlal_lane()
    369  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_gemm_minmax_ukernel_4x8__neon_mlal_lane()
|
/external/XNNPACK/src/qs8-igemm/gen/ |
D | 8x8c4-minmax-neondot.c |
    350  … vout6x01234567_7x01234567 = vextq_s8(vout6x01234567_7x01234567, vout6x01234567_7x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_8x8c4__neondot()
    351  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_8x8c4__neondot()
    352  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_8x8c4__neondot()
    353  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_8x8c4__neondot()
    364  … vout6x01234567_7x01234567 = vextq_s8(vout6x01234567_7x01234567, vout6x01234567_7x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_8x8c4__neondot()
    365  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_8x8c4__neondot()
    366  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_8x8c4__neondot()
    367  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_8x8c4__neondot()
|
D | 6x8c4-minmax-neondot.c |
    288  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_6x8c4__neondot()
    289  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_6x8c4__neondot()
    290  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_6x8c4__neondot()
    299  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_6x8c4__neondot()
    300  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_6x8c4__neondot()
    301  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_6x8c4__neondot()
|
D | 4x8c4-minmax-neondot.c |
    226  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8c4__neondot()
    227  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8c4__neondot()
    234  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8c4__neondot()
    235  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8c4__neondot()
|
D | 8x16c4-minmax-neondot.c |
    518  … vout6x01234567_7x01234567 = vextq_s8(vout6x01234567_7x01234567, vout6x01234567_7x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_8x16c4__neondot()
    519  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_8x16c4__neondot()
    520  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_8x16c4__neondot()
    521  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_8x16c4__neondot()
    532  … vout6x01234567_7x01234567 = vextq_s8(vout6x01234567_7x01234567, vout6x01234567_7x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_8x16c4__neondot()
    533  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_8x16c4__neondot()
    534  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_8x16c4__neondot()
    535  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_8x16c4__neondot()
|
D | 6x16c4-minmax-neondot.c |
    416  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_6x16c4__neondot()
    417  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_6x16c4__neondot()
    418  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_6x16c4__neondot()
    427  … vout4x01234567_5x01234567 = vextq_s8(vout4x01234567_5x01234567, vout4x01234567_5x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_6x16c4__neondot()
    428  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_6x16c4__neondot()
    429  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_6x16c4__neondot()
|
D | 4x16c4-minmax-neondot.c |
    314  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x16c4__neondot()
    315  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x16c4__neondot()
    322  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x16c4__neondot()
    323  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x16c4__neondot()
|
D | 4x8c2-minmax-neon-mull-padal-dup.c |
    324  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8c2__neon_mull_padal_dup()
    325  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8c2__neon_mull_padal_dup()
    332  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8c2__neon_mull_padal_dup()
    333  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8c2__neon_mull_padal_dup()
|
D | 4x8c8-minmax-neon-mull-padal.c |
    360  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8c8__neon_mull_padal()
    361  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8c8__neon_mull_padal()
    368  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8c8__neon_mull_padal()
    369  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8c8__neon_mull_padal()
|
D | 4x8-minmax-neon-mlal-lane.c |
    376  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8__neon_mlal_lane()
    377  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8__neon_mlal_lane()
    384  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8__neon_mlal_lane()
    385  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8__neon_mlal_lane()
|
D | 4x8c16-minmax-neon-mlal-padal.c |
    392  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8c16__neon_mlal_padal()
    393  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8c16__neon_mlal_padal()
    400  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8c16__neon_mlal_padal()
    401  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8c16__neon_mlal_padal()
|
D | 4x8-minmax-neon-mull-addw-dup.c |
    412  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8__neon_mull_addw_dup()
    413  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);  in xnn_qs8_igemm_minmax_ukernel_4x8__neon_mull_addw_dup()
    420  … vout2x01234567_3x01234567 = vextq_s8(vout2x01234567_3x01234567, vout2x01234567_3x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8__neon_mull_addw_dup()
    421  … vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);  in xnn_qs8_igemm_minmax_ukernel_4x8__neon_mull_addw_dup()
|
/external/llvm-project/clang/test/Sema/ |
D | aarch64-neon-ranges.c |
    10  vextq_s8(big, big, 15);  in test_vext_8bit()
    17  vextq_s8(big, big, 16); // expected-error-re {{argument value {{.*}} is outside the valid range}}  in test_vext_8bit()
|
/external/clang/test/Sema/ |
D | aarch64-neon-ranges.c |
    10  vextq_s8(big, big, 15);  in test_vext_8bit()
    17  vextq_s8(big, big, 16); // expected-error {{argument should be a value from 0 to 15}}  in test_vext_8bit()
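Both Sema tests exercise the same constraint: the final argument to vextq_s8 must be an integer constant expression in the range 0..15, so 15 is accepted while 16 is diagnosed (the llvm-project copy matches the diagnostic with a regex, the older clang copy expects the literal "argument should be a value from 0 to 15"). A hedged illustration of the boundary:

#include <arm_neon.h>

// Both calls below use valid immediates; passing 16 instead would trigger
// the out-of-range diagnostic the tests above check for.
int8x16_t ext_bounds(int8x16_t a, int8x16_t b) {
  int8x16_t lo = vextq_s8(a, b, 0);   // offset 0: result is simply a
  int8x16_t hi = vextq_s8(a, b, 15);  // offset 15: lane 15 of a, then lanes 0..14 of b
  return vaddq_s8(lo, hi);
}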
|
/external/clang/test/CodeGen/ |
D | aarch64-neon-extract.c | 53 return vextq_s8(a, b, 2); in test_vextq_s8()
|
/external/llvm-project/clang/test/CodeGen/ |
D | aarch64-neon-extract.c | 53 return vextq_s8(a, b, 2); in test_vextq_s8()
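Both CodeGen tests return vextq_s8(a, b, 2): the result is 16 bytes taken from the concatenation of a and b starting at element 2, i.e. lanes 2..15 of a followed by lanes 0..1 of b (this maps to the AArch64 EXT instruction). A scalar reference for the same computation (the helper name ext2_reference is ours, not part of the tests):

#include <stdint.h>

// Scalar equivalent of vextq_s8(a, b, 2), expressed in element indices.
static void ext2_reference(const int8_t a[16], const int8_t b[16], int8_t out[16]) {
  for (int i = 0; i < 16; ++i) {
    out[i] = (i + 2 < 16) ? a[i + 2] : b[(i + 2) - 16];
  }
}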
|