Home
last modified time | relevance | path

Searched refs:vx1 (Results 1 – 25 of 186) sorted by relevance

12345678

/external/eigen/test/
array_replicate.cpp:32 VectorX vx1; in replicate() local
52 vx1.resize(3*rows,cols); in replicate()
53 vx1 << m2, m2, m2; in replicate()
54 VERIFY_IS_APPROX(vx1+vx1, vx1+(m2.template replicate<3,1>())); in replicate()
56 vx1=m2+(m2.colwise().replicate(1)); in replicate()
66 vx1.resize(rows*f2); in replicate()
68 vx1.segment(j*rows,rows) = v1; in replicate()
69 VERIFY_IS_APPROX(vx1, v1.colwise().replicate(f2)); in replicate()
/external/linux-kselftest/tools/testing/selftests/net/forwarding/
vxlan_bridge_1d.sh:136 ip link add name vx1 type vxlan id 1000 \
139 ip link set dev vx1 up
141 ip link set dev vx1 master br1
148 bridge fdb append dev vx1 00:00:00:00:00:00 dst 192.0.2.34 self
149 bridge fdb append dev vx1 00:00:00:00:00:00 dst 192.0.2.50 self
157 bridge fdb del dev vx1 00:00:00:00:00:00 dst 192.0.2.50 self
158 bridge fdb del dev vx1 00:00:00:00:00:00 dst 192.0.2.34 self
160 ip link set dev vx1 nomaster
161 ip link set dev vx1 down
162 ip link del dev vx1
[all …]
/external/linux-kselftest/tools/testing/selftests/drivers/net/mlxsw/
extack.sh:38 ip link add name vx1 up type vxlan id 1000 \
42 ip link set dev vx1 master br1
75 ip link del dev vx1
86 ip link add name vx1 up type vxlan id 1000 \
90 ip link set dev vx1 master br1
93 bridge vlan add dev vx1 vid 1
99 bridge vlan add dev vx1 vid 1 pvid untagged 2>&1 >/dev/null \
105 ip link del dev vx1
116 ip link add name vx1 up type vxlan id 1000 \
125 ip link set dev vx1 master br1
[all …]
/external/XNNPACK/src/x32-packx/
x4-sse.c:42 const __m128 vx1 = _mm_loadu_ps(x1); in xnn_x32_packx_ukernel_4x__sse() local
49 const __m128 vt0 = _mm_unpacklo_ps(vx0, vx1); in xnn_x32_packx_ukernel_4x__sse()
50 const __m128 vt1 = _mm_unpackhi_ps(vx0, vx1); in xnn_x32_packx_ukernel_4x__sse()
72 const __m128 vx1 = _mm_load_ss(x1); in xnn_x32_packx_ukernel_4x__sse() local
79 const __m128 vx01 = _mm_unpacklo_ps(vx0, vx1); in xnn_x32_packx_ukernel_4x__sse()
x4-psimd.c:40 const psimd_u32 vx1 = psimd_load_u32(x1); in xnn_x32_packx_ukernel_4x__psimd() local
47 const psimd_u32 vt0 = psimd_interleave_lo_u32(vx0, vx1); in xnn_x32_packx_ukernel_4x__psimd()
48 const psimd_u32 vt1 = psimd_interleave_hi_u32(vx0, vx1); in xnn_x32_packx_ukernel_4x__psimd()
70 const psimd_u32 vx1 = psimd_load1_u32(x1); in xnn_x32_packx_ukernel_4x__psimd() local
76 const psimd_u32 vx01 = psimd_interleave_lo_u32(vx0, vx1); in xnn_x32_packx_ukernel_4x__psimd()
x2-scalar.c:31 const float vx1 = *x1++; in xnn_x32_packx_ukernel_2x__scalar() local
34 y_f32[1] = vx1; in xnn_x32_packx_ukernel_2x__scalar()
x3-scalar.c:35 const float vx1 = *x1++; in xnn_x32_packx_ukernel_3x__scalar() local
39 y_f32[1] = vx1; in xnn_x32_packx_ukernel_3x__scalar()
x4-scalar.c:39 const float vx1 = *x1++; in xnn_x32_packx_ukernel_4x__scalar() local
44 y_f32[1] = vx1; in xnn_x32_packx_ukernel_4x__scalar()
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/
scalar-lut64-p2-x2-acc2.c:52 const float vx1 = vi1 - vi_max; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2_acc2() local
62 float vn1 = vx1 * vlog2e_x64 + vmagic_bias; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2_acc2()
91 float vt1 = vn1 * vminus_ln2_o64_hi + vx1; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2_acc2()
116 if XNN_UNPREDICTABLE(vx1 < vdenorm_cutoff) { in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2_acc2()
scalar-p5-x2-acc2.c:51 const float vx1 = vi1 - vi_max; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2_acc2() local
59 float vn1 = vx1 * vlog2e + vmagic_bias; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2_acc2()
73 float vt1 = vn1 * vminus_ln2_hi + vx1; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2_acc2()
106 if XNN_UNPREDICTABLE(vx1 < vdenorm_cutoff) { in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2_acc2()
scalar-p5-x2.c:50 const float vx1 = vi1 - vi_max; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2() local
58 float vn1 = vx1 * vlog2e + vmagic_bias; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2()
72 float vt1 = vn1 * vminus_ln2_hi + vx1; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2()
105 if XNN_UNPREDICTABLE(vx1 < vdenorm_cutoff) { in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2()
scalar-lut64-p2-x2.c:51 const float vx1 = vi1 - vi_max; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2() local
61 float vn1 = vx1 * vlog2e_x64 + vmagic_bias; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2()
90 float vt1 = vn1 * vminus_ln2_o64_hi + vx1; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2()
115 if XNN_UNPREDICTABLE(vx1 < vdenorm_cutoff) { in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2()
scalar-p5-x4.c:52 const float vx1 = vi1 - vi_max; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4() local
62 float vn1 = vx1 * vlog2e + vmagic_bias; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4()
82 float vt1 = vn1 * vminus_ln2_hi + vx1; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4()
131 if XNN_UNPREDICTABLE(vx1 < vdenorm_cutoff) { in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4()
scalar-lut64-p2-x4-acc2.c:54 const float vx1 = vi1 - vi_max; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4_acc2() local
66 float vn1 = vx1 * vlog2e_x64 + vmagic_bias; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4_acc2()
105 float vt1 = vn1 * vminus_ln2_o64_hi + vx1; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4_acc2()
140 if XNN_UNPREDICTABLE(vx1 < vdenorm_cutoff) { in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4_acc2()
scalar-p5-x4-acc2.c:53 const float vx1 = vi1 - vi_max; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4_acc2() local
63 float vn1 = vx1 * vlog2e + vmagic_bias; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4_acc2()
83 float vt1 = vn1 * vminus_ln2_hi + vx1; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4_acc2()
132 if XNN_UNPREDICTABLE(vx1 < vdenorm_cutoff) { in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4_acc2()
scalar-lut64-p2-x4.c:53 const float vx1 = vi1 - vi_max; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4() local
65 float vn1 = vx1 * vlog2e_x64 + vmagic_bias; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4()
104 float vt1 = vn1 * vminus_ln2_o64_hi + vx1; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4()
139 if XNN_UNPREDICTABLE(vx1 < vdenorm_cutoff) { in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4()
scalar-p5-x4-acc4.c:55 const float vx1 = vi1 - vi_max; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4_acc4() local
65 float vn1 = vx1 * vlog2e + vmagic_bias; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4_acc4()
85 float vt1 = vn1 * vminus_ln2_hi + vx1; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4_acc4()
134 if XNN_UNPREDICTABLE(vx1 < vdenorm_cutoff) { in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x4_acc4()
scalar-lut64-p2-x4-acc4.c:56 const float vx1 = vi1 - vi_max; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4_acc4() local
68 float vn1 = vx1 * vlog2e_x64 + vmagic_bias; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4_acc4()
107 float vt1 = vn1 * vminus_ln2_o64_hi + vx1; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4_acc4()
142 if XNN_UNPREDICTABLE(vx1 < vdenorm_cutoff) { in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x4_acc4()
/external/XNNPACK/src/f32-hswish/gen/
wasm-x2.c:34 const float vx1 = x[1]; in xnn_f32_hswish_ukernel__wasm_x2() local
38 float vacc1 = vx1 * vsixth + vhalf; in xnn_f32_hswish_ukernel__wasm_x2()
47 vacc1 *= vx1; in xnn_f32_hswish_ukernel__wasm_x2()
scalar-x2.c:34 const float vx1 = x[1]; in xnn_f32_hswish_ukernel__scalar_x2() local
38 float vacc1 = vx1 * vsixth + vhalf; in xnn_f32_hswish_ukernel__scalar_x2()
47 vacc1 *= vx1; in xnn_f32_hswish_ukernel__scalar_x2()
scalar-x4.c:34 const float vx1 = x[1]; in xnn_f32_hswish_ukernel__scalar_x4() local
40 float vacc1 = vx1 * vsixth + vhalf; in xnn_f32_hswish_ukernel__scalar_x4()
55 vacc1 *= vx1; in xnn_f32_hswish_ukernel__scalar_x4()
wasm-x4.c:34 const float vx1 = x[1]; in xnn_f32_hswish_ukernel__wasm_x4() local
40 float vacc1 = vx1 * vsixth + vhalf; in xnn_f32_hswish_ukernel__wasm_x4()
55 vacc1 *= vx1; in xnn_f32_hswish_ukernel__wasm_x4()
/external/XNNPACK/src/f32-sigmoid/gen/
scalar-lut2048-p1-div-x2.c:46 const float vx1 = x[1]; in xnn_f32_sigmoid_ukernel__scalar_lut2048_p1_div_x2() local
57 const float vz1 = fabsf(vx1); in xnn_f32_sigmoid_ukernel__scalar_lut2048_p1_div_x2()
130 if XNN_UNPREDICTABLE(vx1 > 0.0f) { in xnn_f32_sigmoid_ukernel__scalar_lut2048_p1_div_x2()
scalar-p5-div-x2.c:45 const float vx1 = x[1]; in xnn_f32_sigmoid_ukernel__scalar_p5_div_x2() local
56 const float vz1 = fabsf(vx1); in xnn_f32_sigmoid_ukernel__scalar_p5_div_x2()
125 if XNN_UNPREDICTABLE(vx1 > 0.0f) { in xnn_f32_sigmoid_ukernel__scalar_p5_div_x2()
scalar-lut64-p2-div-x2.c:46 const float vx1 = x[1]; in xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x2() local
57 const float vz1 = fabsf(vx1); in xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x2()
134 if XNN_UNPREDICTABLE(vx1 > 0.0f) { in xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x2()

12345678