Home
last modified time | relevance | path

Searched defs: v0_0 (Results 1 – 25 of 38) sorted by relevance

12

/external/XNNPACK/src/x64-transpose/gen/
D2x2-reuse-multi-sse2.c51 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_reuse_multi_sse2() local
66 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_reuse_multi_sse2() local
D2x2-multi-multi-sse2.c53 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_multi_multi_sse2() local
68 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_multi_multi_sse2() local
D2x2-reuse-switch-sse2.c49 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_reuse_switch_sse2() local
71 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_reuse_switch_sse2() local
D2x2-reuse-mov-sse2.c51 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_reuse_mov_sse2() local
70 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_reuse_mov_sse2() local
D2x2-multi-switch-sse2.c51 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_multi_switch_sse2() local
73 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_multi_switch_sse2() local
D2x2-multi-mov-sse2.c53 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_multi_mov_sse2() local
72 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_1); in xnn_x64_transpose_ukernel__2x2_multi_mov_sse2() local
/external/XNNPACK/src/x32-transpose/gen/
D4x4-reuse-multi-zip-neon.c62 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_reuse_multi_zip_neon() local
88 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_reuse_multi_zip_neon() local
D4x4-multi-multi-zip-neon.c66 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_multi_multi_zip_neon() local
90 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_multi_multi_zip_neon() local
D4x4-reuse-dec-zip-neon.c54 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_reuse_dec_zip_neon() local
91 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_reuse_dec_zip_neon() local
D4x4-reuse-multi-sse2.c68 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_reuse_multi_sse2() local
103 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_reuse_multi_sse2() local
D4x4-multi-dec-zip-neon.c58 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_multi_dec_zip_neon() local
93 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_multi_dec_zip_neon() local
D4x4-reuse-switch-zip-neon.c53 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_reuse_switch_zip_neon() local
89 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_reuse_switch_zip_neon() local
D4x4-reuse-mov-zip-neon.c54 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_reuse_mov_zip_neon() local
94 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_reuse_mov_zip_neon() local
D4x4-multi-switch-zip-neon.c57 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_multi_switch_zip_neon() local
91 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_multi_switch_zip_neon() local
D4x4-multi-mov-zip-neon.c58 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_multi_mov_zip_neon() local
96 const uint32x4x2_t v0_0 = vzipq_u32(v1_0.val[0], v1_1.val[0]); in xnn_x32_transpose_ukernel__4x4_multi_mov_zip_neon() local
D4x4-multi-multi-sse2.c72 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_multi_multi_sse2() local
105 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_multi_multi_sse2() local
D4x4-reuse-mov-sse2.c60 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_reuse_mov_sse2() local
105 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_reuse_mov_sse2() local
D4x4-reuse-switch-sse2.c59 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_reuse_switch_sse2() local
103 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_reuse_switch_sse2() local
D4x4-multi-switch-sse2.c63 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_multi_switch_sse2() local
105 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_multi_switch_sse2() local
D4x4-multi-mov-sse2.c64 const __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_multi_mov_sse2() local
107 __m128i v0_0 = _mm_unpacklo_epi64(v1_0, v1_2); in xnn_x32_transpose_ukernel__4x4_multi_mov_sse2() local
/external/XNNPACK/src/x16-transpose/gen/
D8x8-reuse-dec-zip-neon.c64 const uint16x8x2_t v0_0 = vzipq_u16(v1_0.val[0], v1_2.val[0]); in xnn_x16_transpose_ukernel__8x8_reuse_dec_zip_neon() local
145 const uint16x8x2_t v0_0 = vzipq_u16(v1_0.val[0], v1_2.val[0]); in xnn_x16_transpose_ukernel__8x8_reuse_dec_zip_neon() local
D8x8-reuse-switch-zip-neon.c63 const uint16x8x2_t v0_0 = vzipq_u16(v1_0.val[0], v1_2.val[0]); in xnn_x16_transpose_ukernel__8x8_reuse_switch_zip_neon() local
135 const uint16x8x2_t v0_0 = vzipq_u16(v1_0.val[0], v1_2.val[0]); in xnn_x16_transpose_ukernel__8x8_reuse_switch_zip_neon() local
D8x8-multi-dec-zip-neon.c72 const uint16x8x2_t v0_0 = vzipq_u16(v1_0.val[0], v1_2.val[0]); in xnn_x16_transpose_ukernel__8x8_multi_dec_zip_neon() local
147 const uint16x8x2_t v0_0 = vzipq_u16(v1_0.val[0], v1_2.val[0]); in xnn_x16_transpose_ukernel__8x8_multi_dec_zip_neon() local
D8x8-reuse-multi-zip-neon.c88 const uint16x8x2_t v0_0 = vzipq_u16(v1_0.val[0], v1_2.val[0]); in xnn_x16_transpose_ukernel__8x8_reuse_multi_zip_neon() local
146 const uint16x8x2_t v0_0 = vzipq_u16(v1_0.val[0], v1_2.val[0]); in xnn_x16_transpose_ukernel__8x8_reuse_multi_zip_neon() local
D8x8-reuse-mov-zip-neon.c64 const uint16x8x2_t v0_0 = vzipq_u16(v1_0.val[0], v1_2.val[0]); in xnn_x16_transpose_ukernel__8x8_reuse_mov_zip_neon() local
152 const uint16x8x2_t v0_0 = vzipq_u16(v1_0.val[0], v1_2.val[0]); in xnn_x16_transpose_ukernel__8x8_reuse_mov_zip_neon() local

12