
Lines Matching refs:v3

339 int8x8_t test_vmla_s8(int8x8_t v1, int8x8_t v2, int8x8_t v3) {
341   return vmla_s8(v1, v2, v3);
345 int16x4_t test_vmla_s16(int16x4_t v1, int16x4_t v2, int16x4_t v3) {
347   return vmla_s16(v1, v2, v3);
351 int32x2_t test_vmla_s32(int32x2_t v1, int32x2_t v2, int32x2_t v3) {
353   return vmla_s32(v1, v2, v3);
357 float32x2_t test_vmla_f32(float32x2_t v1, float32x2_t v2, float32x2_t v3) {
359   return vmla_f32(v1, v2, v3);
363 uint8x8_t test_vmla_u8(uint8x8_t v1, uint8x8_t v2, uint8x8_t v3) {
365   return vmla_u8(v1, v2, v3);
369 uint16x4_t test_vmla_u16(uint16x4_t v1, uint16x4_t v2, uint16x4_t v3) {
371   return vmla_u16(v1, v2, v3);
375 uint32x2_t test_vmla_u32(uint32x2_t v1, uint32x2_t v2, uint32x2_t v3) {
377   return vmla_u32(v1, v2, v3);
381 int8x16_t test_vmlaq_s8(int8x16_t v1, int8x16_t v2, int8x16_t v3) {
383   return vmlaq_s8(v1, v2, v3);
387 int16x8_t test_vmlaq_s16(int16x8_t v1, int16x8_t v2, int16x8_t v3) {
389   return vmlaq_s16(v1, v2, v3);
393 int32x4_t test_vmlaq_s32(int32x4_t v1, int32x4_t v2, int32x4_t v3) {
395   return vmlaq_s32(v1, v2, v3);
399 float32x4_t test_vmlaq_f32(float32x4_t v1, float32x4_t v2, float32x4_t v3) {
401   return vmlaq_f32(v1, v2, v3);
405 uint8x16_t test_vmlaq_u8(uint8x16_t v1, uint8x16_t v2, uint8x16_t v3) {
407   return vmlaq_u8(v1, v2, v3);
411 uint16x8_t test_vmlaq_u16(uint16x8_t v1, uint16x8_t v2, uint16x8_t v3) {
413   return vmlaq_u16(v1, v2, v3);
417 uint32x4_t test_vmlaq_u32(uint32x4_t v1, uint32x4_t v2, uint32x4_t v3) {
419   return vmlaq_u32(v1, v2, v3);
423 float64x2_t test_vmlaq_f64(float64x2_t v1, float64x2_t v2, float64x2_t v3) {
425   return vmlaq_f64(v1, v2, v3);
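
The vmla/vmlaq tests above exercise vector multiply-accumulate: each result lane is v1[i] + v2[i] * v3[i], with vmlaq being the 128-bit form. A minimal sketch of that lane-wise behaviour, assuming an AArch64 toolchain where <arm_neon.h> is available; the values and the small main() driver are illustrative only:

    #include <arm_neon.h>
    #include <stdio.h>

    int main(void) {
        int32_t acc_d[2] = {1, 2}, a_d[2] = {3, 4}, b_d[2] = {5, 6}, r_d[2];
        int32x2_t acc = vld1_s32(acc_d);
        int32x2_t a   = vld1_s32(a_d);
        int32x2_t b   = vld1_s32(b_d);
        /* vmla_s32: r[i] = acc[i] + a[i] * b[i]  ->  {1 + 15, 2 + 24} */
        int32x2_t r = vmla_s32(acc, a, b);
        vst1_s32(r_d, r);
        printf("%d %d\n", (int)r_d[0], (int)r_d[1]);   /* expected: 16 26 */
        return 0;
    }
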
429 int8x8_t test_vmls_s8(int8x8_t v1, int8x8_t v2, int8x8_t v3) {
431   return vmls_s8(v1, v2, v3);
435 int16x4_t test_vmls_s16(int16x4_t v1, int16x4_t v2, int16x4_t v3) {
437   return vmls_s16(v1, v2, v3);
441 int32x2_t test_vmls_s32(int32x2_t v1, int32x2_t v2, int32x2_t v3) {
443   return vmls_s32(v1, v2, v3);
447 float32x2_t test_vmls_f32(float32x2_t v1, float32x2_t v2, float32x2_t v3) {
449   return vmls_f32(v1, v2, v3);
453 uint8x8_t test_vmls_u8(uint8x8_t v1, uint8x8_t v2, uint8x8_t v3) {
455   return vmls_u8(v1, v2, v3);
459 uint16x4_t test_vmls_u16(uint16x4_t v1, uint16x4_t v2, uint16x4_t v3) {
461   return vmls_u16(v1, v2, v3);
465 uint32x2_t test_vmls_u32(uint32x2_t v1, uint32x2_t v2, uint32x2_t v3) {
467   return vmls_u32(v1, v2, v3);
470 int8x16_t test_vmlsq_s8(int8x16_t v1, int8x16_t v2, int8x16_t v3) {
472   return vmlsq_s8(v1, v2, v3);
476 int16x8_t test_vmlsq_s16(int16x8_t v1, int16x8_t v2, int16x8_t v3) {
478   return vmlsq_s16(v1, v2, v3);
482 int32x4_t test_vmlsq_s32(int32x4_t v1, int32x4_t v2, int32x4_t v3) {
484   return vmlsq_s32(v1, v2, v3);
488 float32x4_t test_vmlsq_f32(float32x4_t v1, float32x4_t v2, float32x4_t v3) {
490   return vmlsq_f32(v1, v2, v3);
493 uint8x16_t test_vmlsq_u8(uint8x16_t v1, uint8x16_t v2, uint8x16_t v3) {
495   return vmlsq_u8(v1, v2, v3);
499 uint16x8_t test_vmlsq_u16(uint16x8_t v1, uint16x8_t v2, uint16x8_t v3) {
501   return vmlsq_u16(v1, v2, v3);
505 uint32x4_t test_vmlsq_u32(uint32x4_t v1, uint32x4_t v2, uint32x4_t v3) {
507   return vmlsq_u32(v1, v2, v3);
511 float64x2_t test_vmlsq_f64(float64x2_t v1, float64x2_t v2, float64x2_t v3) {
513   return vmlsq_f64(v1, v2, v3);
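
vmls/vmlsq is the subtracting counterpart: each result lane is v1[i] - v2[i] * v3[i]. A sketch under the same assumptions as above (AArch64, <arm_neon.h>; values illustrative only):

    #include <arm_neon.h>
    #include <stdio.h>

    int main(void) {
        int32_t acc_d[2] = {1, 2}, a_d[2] = {3, 4}, b_d[2] = {5, 6}, r_d[2];
        /* vmls_s32: r[i] = acc[i] - a[i] * b[i]  ->  {1 - 15, 2 - 24} */
        int32x2_t r = vmls_s32(vld1_s32(acc_d), vld1_s32(a_d), vld1_s32(b_d));
        vst1_s32(r_d, r);
        printf("%d %d\n", (int)r_d[0], (int)r_d[1]);   /* expected: -14 -22 */
        return 0;
    }
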
516 float32x2_t test_vfma_f32(float32x2_t v1, float32x2_t v2, float32x2_t v3) {
518   return vfma_f32(v1, v2, v3);
522 float32x4_t test_vfmaq_f32(float32x4_t v1, float32x4_t v2, float32x4_t v3) {
524   return vfmaq_f32(v1, v2, v3);
528 float64x2_t test_vfmaq_f64(float64x2_t v1, float64x2_t v2, float64x2_t v3) {
530   return vfmaq_f64(v1, v2, v3);
533 float32x2_t test_vfms_f32(float32x2_t v1, float32x2_t v2, float32x2_t v3) {
535   return vfms_f32(v1, v2, v3);
539 float32x4_t test_vfmsq_f32(float32x4_t v1, float32x4_t v2, float32x4_t v3) {
541   return vfmsq_f32(v1, v2, v3);
545 float64x2_t test_vfmsq_f64(float64x2_t v1, float64x2_t v2, float64x2_t v3) {
547   return vfmsq_f64(v1, v2, v3);
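
vfma/vfms are the fused floating-point forms: vfma computes v1[i] + v2[i] * v3[i] with a single rounding step, and vfms negates the product (v1[i] - v2[i] * v3[i]), again fused. A sketch, assuming an AArch64 target with <arm_neon.h>; the values are illustrative:

    #include <arm_neon.h>
    #include <stdio.h>

    int main(void) {
        float acc_d[2] = {1.0f, 2.0f}, a_d[2] = {0.5f, 0.25f}, b_d[2] = {4.0f, 8.0f};
        float fma_d[2], fms_d[2];
        float32x2_t acc = vld1_f32(acc_d), a = vld1_f32(a_d), b = vld1_f32(b_d);
        /* vfma_f32: acc[i] + a[i]*b[i] (one rounding); vfms_f32: acc[i] - a[i]*b[i] */
        vst1_f32(fma_d, vfma_f32(acc, a, b));   /* {3.0, 4.0} */
        vst1_f32(fms_d, vfms_f32(acc, a, b));   /* {-1.0, 0.0} */
        printf("%g %g | %g %g\n", fma_d[0], fma_d[1], fms_d[0], fms_d[1]);
        return 0;
    }
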
593 int8x8_t test_vaba_s8(int8x8_t v1, int8x8_t v2, int8x8_t v3) {
595   return vaba_s8(v1, v2, v3);
599 int16x4_t test_vaba_s16(int16x4_t v1, int16x4_t v2, int16x4_t v3) {
601   return vaba_s16(v1, v2, v3);
605 int32x2_t test_vaba_s32(int32x2_t v1, int32x2_t v2, int32x2_t v3) {
607   return vaba_s32(v1, v2, v3);
611 uint8x8_t test_vaba_u8(uint8x8_t v1, uint8x8_t v2, uint8x8_t v3) {
613   return vaba_u8(v1, v2, v3);
617 uint16x4_t test_vaba_u16(uint16x4_t v1, uint16x4_t v2, uint16x4_t v3) {
619   return vaba_u16(v1, v2, v3);
623 uint32x2_t test_vaba_u32(uint32x2_t v1, uint32x2_t v2, uint32x2_t v3) {
625   return vaba_u32(v1, v2, v3);
629 int8x16_t test_vabaq_s8(int8x16_t v1, int8x16_t v2, int8x16_t v3) {
631   return vabaq_s8(v1, v2, v3);
635 int16x8_t test_vabaq_s16(int16x8_t v1, int16x8_t v2, int16x8_t v3) {
637   return vabaq_s16(v1, v2, v3);
641 int32x4_t test_vabaq_s32(int32x4_t v1, int32x4_t v2, int32x4_t v3) {
643   return vabaq_s32(v1, v2, v3);
647 uint8x16_t test_vabaq_u8(uint8x16_t v1, uint8x16_t v2, uint8x16_t v3) {
649   return vabaq_u8(v1, v2, v3);
653 uint16x8_t test_vabaq_u16(uint16x8_t v1, uint16x8_t v2, uint16x8_t v3) {
655   return vabaq_u16(v1, v2, v3);
659 uint32x4_t test_vabaq_u32(uint32x4_t v1, uint32x4_t v2, uint32x4_t v3) {
661   return vabaq_u32(v1, v2, v3);
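
vaba/vabaq is absolute-difference-and-accumulate: each result lane is v1[i] + |v2[i] - v3[i]|. A sketch under the same assumptions (AArch64, <arm_neon.h>; values illustrative only):

    #include <arm_neon.h>
    #include <stdio.h>

    int main(void) {
        int32_t acc_d[2] = {100, 200}, a_d[2] = {3, 9}, b_d[2] = {8, 2}, r_d[2];
        /* vaba_s32: r[i] = acc[i] + |a[i] - b[i]|  ->  {100 + 5, 200 + 7} */
        int32x2_t r = vaba_s32(vld1_s32(acc_d), vld1_s32(a_d), vld1_s32(b_d));
        vst1_s32(r_d, r);
        printf("%d %d\n", (int)r_d[0], (int)r_d[1]);   /* expected: 105 207 */
        return 0;
    }
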
756 int8x8_t test_vbsl_s8(uint8x8_t v1, int8x8_t v2, int8x8_t v3) {
758   return vbsl_s8(v1, v2, v3);
762 int16x4_t test_vbsl_s16(uint16x4_t v1, int16x4_t v2, int16x4_t v3) {
764   return vbsl_s16(v1, v2, v3);
768 int32x2_t test_vbsl_s32(uint32x2_t v1, int32x2_t v2, int32x2_t v3) {
770   return vbsl_s32(v1, v2, v3);
774 int64x1_t test_vbsl_s64(uint64x1_t v1, int64x1_t v2, int64x1_t v3) {
776   return vbsl_s64(v1, v2, v3);
780 uint8x8_t test_vbsl_u8(uint8x8_t v1, uint8x8_t v2, uint8x8_t v3) {
782   return vbsl_u8(v1, v2, v3);
786 uint16x4_t test_vbsl_u16(uint16x4_t v1, uint16x4_t v2, uint16x4_t v3) {
788   return vbsl_u16(v1, v2, v3);
792 uint32x2_t test_vbsl_u32(uint32x2_t v1, uint32x2_t v2, uint32x2_t v3) {
794   return vbsl_u32(v1, v2, v3);
798 uint64x1_t test_vbsl_u64(uint64x1_t v1, uint64x1_t v2, uint64x1_t v3) {
800   return vbsl_u64(v1, v2, v3);
804 float32x2_t test_vbsl_f32(uint32x2_t v1, float32x2_t v2, float32x2_t v3) {
806   return vbsl_f32(v1, v2, v3);
810 poly8x8_t test_vbsl_p8(uint8x8_t v1, poly8x8_t v2, poly8x8_t v3) {
812   return vbsl_p8(v1, v2, v3);
816 poly16x4_t test_vbsl_p16(uint16x4_t v1, poly16x4_t v2, poly16x4_t v3) {
818   return vbsl_p16(v1, v2, v3);
822 int8x16_t test_vbslq_s8(uint8x16_t v1, int8x16_t v2, int8x16_t v3) {
824   return vbslq_s8(v1, v2, v3);
828 int16x8_t test_vbslq_s16(uint16x8_t v1, int16x8_t v2, int16x8_t v3) {
830   return vbslq_s16(v1, v2, v3);
834 int32x4_t test_vbslq_s32(uint32x4_t v1, int32x4_t v2, int32x4_t v3) {
836   return vbslq_s32(v1, v2, v3);
840 int64x2_t test_vbslq_s64(uint64x2_t v1, int64x2_t v2, int64x2_t v3) {
842   return vbslq_s64(v1, v2, v3);
846 uint8x16_t test_vbslq_u8(uint8x16_t v1, uint8x16_t v2, uint8x16_t v3) {
848   return vbslq_u8(v1, v2, v3);
852 uint16x8_t test_vbslq_u16(uint16x8_t v1, uint16x8_t v2, uint16x8_t v3) {
854   return vbslq_u16(v1, v2, v3);
858 uint32x4_t test_vbslq_u32(uint32x4_t v1, uint32x4_t v2, uint32x4_t v3) {
860   return vbslq_u32(v1, v2, v3);
864 uint64x2_t test_vbslq_u64(uint64x2_t v1, uint64x2_t v2, uint64x2_t v3) {
866   return vbslq_u64(v1, v2, v3);
870 float32x4_t test_vbslq_f32(uint32x4_t v1, float32x4_t v2, float32x4_t v3) {
872   return vbslq_f32(v1, v2, v3);
876 poly8x16_t test_vbslq_p8(uint8x16_t v1, poly8x16_t v2, poly8x16_t v3) {
878   return vbslq_p8(v1, v2, v3);
882 poly16x8_t test_vbslq_p16(uint16x8_t v1, poly16x8_t v2, poly16x8_t v3) {
884   return vbslq_p16(v1, v2, v3);
888 float64x2_t test_vbslq_f64(uint64x2_t v1, float64x2_t v2, float64x2_t v3) {
890   return vbslq_f64(v1, v2, v3);
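
vbsl/vbslq is a bitwise select: for every bit position, a set bit in the first (mask) operand v1 picks the bit from v2 and a clear bit picks it from v3, i.e. (v1 & v2) | (~v1 & v3). That is why the first argument is always an unsigned integer vector, regardless of the element type being selected. A sketch under the same assumptions (AArch64, <arm_neon.h>; values illustrative only):

    #include <arm_neon.h>
    #include <stdio.h>

    int main(void) {
        uint32_t mask_d[2] = {0xFFFFFFFFu, 0x00000000u};
        uint32_t a_d[2] = {10, 20}, b_d[2] = {30, 40}, r_d[2];
        /* vbsl_u32: (mask & a) | (~mask & b) -> lane 0 comes from a, lane 1 from b */
        uint32x2_t r = vbsl_u32(vld1_u32(mask_d), vld1_u32(a_d), vld1_u32(b_d));
        vst1_u32(r_d, r);
        printf("%u %u\n", (unsigned)r_d[0], (unsigned)r_d[1]);   /* expected: 10 40 */
        return 0;
    }
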