
Searched defs:va2 (Results 1 – 25 of 1288) sorted by relevance


/external/XNNPACK/src/f32-gemm/gen/
4x2-minmax-neon-lane-ld64.c:70   const float32x2_t va2 = vld1_f32(a2); a2 += 2;   in xnn_f32_gemm_minmax_ukernel_4x2__neon_lane_ld64() local
4x2-minmax-neon-lane-ld64.c:89   const float32x2_t va2 = vld1_dup_f32(a2); a2 += 1;   in xnn_f32_gemm_minmax_ukernel_4x2__neon_lane_ld64() local
4x2-minmax-neonfma-lane-ld64.c:70   const float32x2_t va2 = vld1_f32(a2); a2 += 2;   in xnn_f32_gemm_minmax_ukernel_4x2__neonfma_lane_ld64() local
4x2-minmax-neonfma-lane-ld64.c:111  const float32x2_t va2 = vld1_dup_f32(a2); a2 += 1;   in xnn_f32_gemm_minmax_ukernel_4x2__neonfma_lane_ld64() local
6x2-minmax-neon-lane-ld64.c:84   const float32x2_t va2 = vld1_f32(a2); a2 += 2;   in xnn_f32_gemm_minmax_ukernel_6x2__neon_lane_ld64() local
6x2-minmax-neon-lane-ld64.c:109  const float32x2_t va2 = vld1_dup_f32(a2); a2 += 1;   in xnn_f32_gemm_minmax_ukernel_6x2__neon_lane_ld64() local
3x8s4-wasmrelaxedsimd-fma.c:68   v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_ukernel_3x8s4__wasmrelaxedsimd_fma() local
3x8s4-wasmrelaxedsimd-fma.c:133  v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_ukernel_3x8s4__wasmrelaxedsimd_fma() local
3x8s4-wasmsimd.c:68   v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_ukernel_3x8s4__wasmsimd() local
3x8s4-wasmsimd.c:133  v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_ukernel_3x8s4__wasmsimd() local
3x8s4-minmax-sse.c:68   __m128 va2 = _mm_loadu_ps(a2);   in xnn_f32_gemm_minmax_ukernel_3x8s4__sse() local
3x8s4-minmax-sse.c:133  __m128 va2 = _mm_loadu_ps(a2);   in xnn_f32_gemm_minmax_ukernel_3x8s4__sse() local
3x8s4-relu-wasmsimd.c:68   v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_relu_ukernel_3x8s4__wasmsimd() local
3x8s4-relu-wasmsimd.c:133  v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_relu_ukernel_3x8s4__wasmsimd() local
4x8-minmax-neonfma-lane-ld64.c:74   const float32x2_t va2 = vld1_f32(a2); a2 += 2;   in xnn_f32_gemm_minmax_ukernel_4x8__neonfma_lane_ld64() local
4x8-minmax-neonfma-lane-ld64.c:103  const float32x4_t va2 = vld1q_dup_f32(a2); a2 += 1;   in xnn_f32_gemm_minmax_ukernel_4x8__neonfma_lane_ld64() local
3x8s4-minmax-wasmsimd-arm.c:70   v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_minmax_ukernel_3x8s4__wasmsimd_arm() local
3x8s4-minmax-wasmsimd-arm.c:135  v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_minmax_ukernel_3x8s4__wasmsimd_arm() local
4x8-minmax-neon-lane-ld64.c:74   const float32x2_t va2 = vld1_f32(a2); a2 += 2;   in xnn_f32_gemm_minmax_ukernel_4x8__neon_lane_ld64() local
4x8-minmax-neon-lane-ld64.c:103  const float32x4_t va2 = vld1q_dup_f32(a2); a2 += 1;   in xnn_f32_gemm_minmax_ukernel_4x8__neon_lane_ld64() local
3x8s4-minmax-wasmrelaxedsimd.c:70   v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_minmax_ukernel_3x8s4__wasmrelaxedsimd() local
3x8s4-minmax-wasmrelaxedsimd.c:135  v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_minmax_ukernel_3x8s4__wasmrelaxedsimd() local
3x8s4-relu-wasmrelaxedsimd-fma.c:68   v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_relu_ukernel_3x8s4__wasmrelaxedsimd_fma() local
3x8s4-relu-wasmrelaxedsimd-fma.c:133  v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_relu_ukernel_3x8s4__wasmrelaxedsimd_fma() local
4x2c4-wasmsimd.c:76   const v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_ukernel_4x2c4__wasmsimd() local
4x2c4-wasmsimd.c:99   const v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_ukernel_4x2c4__wasmsimd() local
3x8s4-minmax-wasmsimd-x86.c:70   v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_minmax_ukernel_3x8s4__wasmsimd_x86() local
3x8s4-minmax-wasmsimd-x86.c:135  v128_t va2 = wasm_v128_load(a2);   in xnn_f32_gemm_minmax_ukernel_3x8s4__wasmsimd_x86() local
/external/XNNPACK/src/f32-igemm/gen/
4x2-minmax-neon-lane-ld64.c:92   const float32x2_t va2 = vld1_f32(a2); a2 += 2;   in xnn_f32_igemm_minmax_ukernel_4x2__neon_lane_ld64() local
4x2-minmax-neon-lane-ld64.c:111  const float32x2_t va2 = vld1_dup_f32(a2);   in xnn_f32_igemm_minmax_ukernel_4x2__neon_lane_ld64() local
4x4-minmax-neon-lane-ld64.c:92   const float32x2_t va2 = vld1_f32(a2); a2 += 2;   in xnn_f32_igemm_minmax_ukernel_4x4__neon_lane_ld64() local
4x4-minmax-neon-lane-ld64.c:112  const float32x4_t va2 = vld1q_dup_f32(a2);   in xnn_f32_igemm_minmax_ukernel_4x4__neon_lane_ld64() local
4x4-minmax-neonfma-lane-ld64.c:92   const float32x2_t va2 = vld1_f32(a2); a2 += 2;   in xnn_f32_igemm_minmax_ukernel_4x4__neonfma_lane_ld64() local
4x4-minmax-neonfma-lane-ld64.c:112  const float32x4_t va2 = vld1q_dup_f32(a2);   in xnn_f32_igemm_minmax_ukernel_4x4__neonfma_lane_ld64() local
4x2-minmax-neonfma-lane-ld64.c:92   const float32x2_t va2 = vld1_f32(a2); a2 += 2;   in xnn_f32_igemm_minmax_ukernel_4x2__neonfma_lane_ld64() local
4x2-minmax-neonfma-lane-ld64.c:133  const float32x2_t va2 = vld1_dup_f32(a2);   in xnn_f32_igemm_minmax_ukernel_4x2__neonfma_lane_ld64() local
/external/clang/test/Preprocessor/
macro_fn_varargs_iso.c:8   #define va2(a, b, ...) foo2{__VA_ARGS__, b, a}   macro
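
This is the only non-XNNPACK hit: a clang preprocessor test that defines va2 as an ISO C99 variadic macro. As a quick illustration of how it expands (the surrounding program is hypothetical; foo2 is just an arbitrary token in the test, turned into a compound-literal type here so the result can be printed):

#include <stdio.h>

typedef struct { int v[4]; } foo2_t;
#define foo2 (foo2_t)                              /* illustrative only, not part of the test */
#define va2(a, b, ...) foo2{__VA_ARGS__, b, a}     /* same shape as the test macro */

int main(void) {
  foo2_t x = va2(1, 2, 3, 4);   /* expands to (foo2_t){3, 4, 2, 1} */
  printf("%d %d %d %d\n", x.v[0], x.v[1], x.v[2], x.v[3]);   /* prints: 3 4 2 1 */
  return 0;
}
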
/external/XNNPACK/src/qs8-gemm/gen/
3x4c2-xw-minmax-fp32-avx.c:71   const __m128i va2 = _mm_loadl_epi64((const __m128i*) a2);   in xnn_qs8_gemm_xw_minmax_fp32_ukernel_3x4c2__avx() local
3x4c2-xw-minmax-fp32-avx.c:118  const __m128i va2 = _mm_loadl_epi64((const __m128i*) a2);   in xnn_qs8_gemm_xw_minmax_fp32_ukernel_3x4c2__avx() local
3x4c2-xw-minmax-fp32-xop.c:76   const __m128i va2 = _mm_loadl_epi64((const __m128i*) a2);   in xnn_qs8_gemm_xw_minmax_fp32_ukernel_3x4c2__xop() local
3x4c2-xw-minmax-fp32-xop.c:123  const __m128i va2 = _mm_loadl_epi64((const __m128i*) a2);   in xnn_qs8_gemm_xw_minmax_fp32_ukernel_3x4c2__xop() local
3x4c2-xw-minmax-fp32-sse41.c:71   const __m128i va2 = _mm_loadl_epi64((const __m128i*) a2);   in xnn_qs8_gemm_xw_minmax_fp32_ukernel_3x4c2__sse41() local
3x4c2-xw-minmax-fp32-sse41.c:118  const __m128i va2 = _mm_loadl_epi64((const __m128i*) a2);   in xnn_qs8_gemm_xw_minmax_fp32_ukernel_3x4c2__sse41() local
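
In these qs8 hits, va2 holds eight signed 8-bit inputs from row 2; the kernels then sign-extend them to 16 bits and multiply-accumulate against the pre-extended int16 weights implied by "xw". A minimal, hypothetical sketch of one such step, assuming SSE4.1 (names are illustrative, not copied from the files above):

#include <immintrin.h>
#include <stdint.h>

/* One illustrative k-pair step for row 2 of a 3x4c2 qs8 GEMM with
 * pre-extended (int16) weights. */
static __m128i qs8_row2_c2_step(const int8_t* a2, const int16_t* w, __m128i vacc2x0123) {
  const __m128i va2  = _mm_loadl_epi64((const __m128i*) a2);   /* 8 int8 inputs of row 2 */
  const __m128i vxa2 = _mm_cvtepi8_epi16(va2);                 /* sign-extend to int16 */
  const __m128i vxb0 = _mm_loadu_si128((const __m128i*) w);    /* int16 weights for k-pair 0 */

  /* Broadcast the first k-pair of row 2 and accumulate four 32-bit column sums. */
  return _mm_add_epi32(vacc2x0123,
      _mm_madd_epi16(_mm_shuffle_epi32(vxa2, _MM_SHUFFLE(0, 0, 0, 0)), vxb0));
}
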
/external/XNNPACK/src/f32-gemm/gen-inc/
4x8inc-minmax-neonfma-lane-ld64.c:76   const float32x2_t va2 = vld1_f32(a2); a2 += 2;   in xnn_f32_gemminc_minmax_ukernel_4x8__neonfma_lane_ld64() local
4x8inc-minmax-neonfma-lane-ld64.c:105  const float32x4_t va2 = vld1q_dup_f32(a2); a2 += 1;   in xnn_f32_gemminc_minmax_ukernel_4x8__neonfma_lane_ld64() local
3x8s4inc-minmax-sse.c:70   __m128 va2 = _mm_loadu_ps(a2);   in xnn_f32_gemminc_minmax_ukernel_3x8s4__sse() local
3x8s4inc-minmax-sse.c:135  __m128 va2 = _mm_loadu_ps(a2);   in xnn_f32_gemminc_minmax_ukernel_3x8s4__sse() local
4x8inc-minmax-neon-lane-ld64.c:76   const float32x2_t va2 = vld1_f32(a2); a2 += 2;   in xnn_f32_gemminc_minmax_ukernel_4x8__neon_lane_ld64() local
4x8inc-minmax-neon-lane-ld64.c:105  const float32x4_t va2 = vld1q_dup_f32(a2); a2 += 1;   in xnn_f32_gemminc_minmax_ukernel_4x8__neon_lane_ld64() local
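
Most of the f32 hits above follow the same pattern: the ld64 microkernels load two consecutive elements of row 2 of A into va2 and broadcast one lane per k step against a packed 8-wide panel of B, while the remainder path (vld1_dup_f32 / vld1q_dup_f32) handles a single leftover element. A minimal sketch of that main-loop pattern, assuming NEON and illustrative names (not a copy of any listed kernel):

#include <arm_neon.h>

/* One ld64 main-loop step for row 2: consume 2 elements of the k dimension
 * against an 8-wide panel of packed weights. */
static void f32_row2_ld64_step(const float** a2_ptr, const float** w_ptr,
                               float32x4_t* vacc2x0123, float32x4_t* vacc2x4567) {
  const float* a2 = *a2_ptr;
  const float* w  = *w_ptr;

  const float32x2_t va2 = vld1_f32(a2); a2 += 2;      /* two floats of row 2 of A */

  const float32x4_t vb0123c0 = vld1q_f32(w); w += 4;  /* B, k+0, columns 0..3 */
  const float32x4_t vb4567c0 = vld1q_f32(w); w += 4;  /* B, k+0, columns 4..7 */
  const float32x4_t vb0123c1 = vld1q_f32(w); w += 4;  /* B, k+1, columns 0..3 */
  const float32x4_t vb4567c1 = vld1q_f32(w); w += 4;  /* B, k+1, columns 4..7 */

  /* Broadcast one lane of va2 per k step and multiply-accumulate. */
  *vacc2x0123 = vmlaq_lane_f32(*vacc2x0123, vb0123c0, va2, 0);
  *vacc2x4567 = vmlaq_lane_f32(*vacc2x4567, vb4567c0, va2, 0);
  *vacc2x0123 = vmlaq_lane_f32(*vacc2x0123, vb0123c1, va2, 1);
  *vacc2x4567 = vmlaq_lane_f32(*vacc2x4567, vb4567c1, va2, 1);

  *a2_ptr = a2;
  *w_ptr = w;
}

The neonfma variants use vfmaq_lane_f32 in place of vmlaq_lane_f32, and the wasmsimd/sse s4 variants load four elements into va2 and rotate the register between multiply-accumulates instead of lane-broadcasting.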
