
Searched refs:v26 (Results 1 – 25 of 207) sorted by relevance


/external/libavc/common/armv8/
ih264_inter_pred_luma_horz_hpel_vert_hpel_av8.s
82 movi v26.8h, #0x14 // Filter coeff 20 into Q13
120 uaddl v26.8h, v3.8b, v9.8b
125 mls v20.8h, v26.8h , v30.8h
126 uaddl v26.8h, v13.8b, v16.8b
128 mls v22.8h, v26.8h , v30.8h
131 ext v26.16b, v18.16b , v20.16b , #6
134 add v0.8h, v24.8h , v26.8h
136 ext v26.16b, v18.16b , v20.16b , #8
137 add v24.8h, v24.8h , v26.8h
139 saddl v26.4s, v18.4h, v23.4h
[all …]
ih264_intra_pred_luma_16x16_av8.s
502 shl v26.8h, v6.8h, #3
504 sub v30.8h, v30.8h , v26.8h
506 add v26.8h, v28.8h , v0.8h
508 sqrshrun v20.8b, v26.8h, #5
510 add v26.8h, v26.8h , v6.8h
512 sqrshrun v22.8b, v26.8h, #5
515 add v26.8h, v26.8h , v6.8h
517 sqrshrun v20.8b, v26.8h, #5
520 add v26.8h, v26.8h , v6.8h
522 sqrshrun v22.8b, v26.8h, #5
[all …]
ih264_inter_pred_luma_horz_qpel_vert_hpel_av8.s
143 movi v26.8h, #0x14 // Filter coeff 20 into Q13
182 uaddl v26.8h, v3.8b, v9.8b
187 mls v20.8h, v26.8h , v30.8h
188 uaddl v26.8h, v13.8b, v16.8b
190 mls v22.8h, v26.8h , v30.8h
194 ext v26.16b, v18.16b , v20.16b , #6
197 add v0.8h, v24.8h , v26.8h
199 ext v26.16b, v18.16b , v20.16b , #8
200 add v24.8h, v24.8h , v26.8h
202 saddl v26.4s, v18.4h, v22.4h
[all …]
ih264_inter_pred_luma_vert_qpel_av8.s
154 uaddl v26.8h, v3.8b, v9.8b // temp5 = src[1_8] + src[4_8]
160 mls v20.8h, v26.8h , v24.8h // temp4 -= temp5 * 5
161 uaddl v26.8h, v5.8b, v11.8b
177 mls v14.8h, v26.8h , v24.8h
181 uaddl v26.8h, v7.8b, v1.8b
193 mls v16.8h, v26.8h , v24.8h
199 uaddl v26.8h, v9.8b, v3.8b
204 mls v18.8h, v26.8h , v24.8h
212 uaddl v26.8h, v5.8b, v11.8b // temp5 = src[1_8] + src[4_8]
230 mls v20.8h, v26.8h , v24.8h // temp4 -= temp5 * 5
[all …]
/external/libhevc/common/arm64/
ihevc_sao_edge_offset_class1.s
155 …Uxtl v26.8h, v18.8b //II pi2_tmp_cur_row.val[0] = vreinterpretq_s16_u16(vmovl_…
198 …SADDW v26.8h, v26.8h , v24.8b //II pi2_tmp_cur_row.val[0] = vaddw_s8(pi2_tmp_cur_row.val…
204 …SMAX v26.8h, v26.8h , v2.8h //II pi2_tmp_cur_row.val[0] = vmaxq_s16(pi2_tmp_cur_row.va…
205 …UMIN v26.8h, v26.8h , v4.8h //II pi2_tmp_cur_row.val[0] = vreinterpretq_s16_u16(vminq_…
211 xtn v30.8b, v26.8h //II vmovn_s16(pi2_tmp_cur_row.val[0])
234 …Uxtl v26.8h, v3.8b //pi2_tmp_cur_row.val[0] = vreinterpretq_s16_u16(vmovl_u8(…
235 …SADDW v26.8h, v26.8h , v24.8b //pi2_tmp_cur_row.val[0] = vaddw_s8(pi2_tmp_cur_row.val[0]…
236 …SMAX v26.8h, v26.8h , v2.8h //pi2_tmp_cur_row.val[0] = vmaxq_s16(pi2_tmp_cur_row.val[0…
237 …UMIN v26.8h, v26.8h , v4.8h //pi2_tmp_cur_row.val[0] = vreinterpretq_s16_u16(vminq_u16…
245 xtn v30.8b, v26.8h //vmovn_s16(pi2_tmp_cur_row.val[0])
[all …]
ihevc_inter_pred_chroma_vert_w16inp.s
213 smull v26.4s, v2.4h, v16.4h //vmull_s16(src_tmp2, coeff_0)
215 smlal v26.4s, v3.4h, v17.4h
216 smlal v26.4s, v4.4h, v18.4h
218 smlal v26.4s, v5.4h, v19.4h
234 sqshrn v26.4h, v26.4s,#6 //right shift
249 sqrshrun v26.8b, v26.8h,#6 //rounding shift
256 st1 {v26.s}[0],[x9],x3 //stores the loaded value
266 smull v26.4s, v2.4h, v16.4h //vmull_s16(src_tmp2, coeff_0)
268 smlal v26.4s, v3.4h, v17.4h
271 smlal v26.4s, v4.4h, v18.4h
[all …]
ihevc_itrans_recon_16x16.s
256 smull v26.4s, v6.4h, v0.h[3] //// y1 * cos3(part of b1)
261 smlal v26.4s, v7.4h, v2.h[1] //// y1 * cos3 - y3 * sin1(part of b1)
283 smlal v26.4s, v8.4h, v3.h[3]
289 smlsl v26.4s, v9.4h, v2.h[3]
332 smlsl v26.4s, v6.4h, v1.h[1] //// y1 * cos3(part of b1)
337 smlsl v26.4s, v7.4h, v0.h[1] //// y1 * cos3 - y3 * sin1(part of b1)
344 smlsl v26.4s, v8.4h, v1.h[3]
350 smlsl v26.4s, v9.4h, v3.h[1]
387 add v12.4s, v14.4s , v26.4s
388 sub v24.4s, v14.4s , v26.4s
[all …]
ihevc_sao_edge_offset_class1_chroma.s
184 …Uxtl v26.8h, v18.8b //II pi2_tmp_cur_row.val[0] = vreinterpretq_s16_u16(vmovl_…
244 …SADDW v26.8h, v26.8h , v24.8b //II pi2_tmp_cur_row.val[0] = vaddw_s8(pi2_tmp_cur_row.val…
252 …SMAX v26.8h, v26.8h , v2.8h //II pi2_tmp_cur_row.val[0] = vmaxq_s16(pi2_tmp_cur_row.va…
253 …UMIN v26.8h, v26.8h , v4.8h //II pi2_tmp_cur_row.val[0] = vreinterpretq_s16_u16(vminq_…
259 xtn v30.8b, v26.8h //II vmovn_s16(pi2_tmp_cur_row.val[0])
294 …Uxtl v26.8h, v3.8b //pi2_tmp_cur_row.val[0] = vreinterpretq_s16_u16(vmovl_u8(…
295 …SADDW v26.8h, v26.8h , v24.8b //pi2_tmp_cur_row.val[0] = vaddw_s8(pi2_tmp_cur_row.val[0]…
296 …SMAX v26.8h, v26.8h , v2.8h //pi2_tmp_cur_row.val[0] = vmaxq_s16(pi2_tmp_cur_row.val[0…
297 …UMIN v26.8h, v26.8h , v4.8h //pi2_tmp_cur_row.val[0] = vreinterpretq_s16_u16(vminq_u16…
305 xtn v30.8b, v26.8h //vmovn_s16(pi2_tmp_cur_row.val[0])
[all …]
ihevc_itrans_recon_32x32.s
217 smull v26.4s, v8.4h, v0.h[3] //// y1 * cos3(part of b1)
222 smlal v26.4s, v9.4h, v2.h[1] //// y1 * cos3 - y3 * sin1(part of b1)
257 smlal v26.4s, v14.4h, v3.h[3]
263 smlal v26.4s, v15.4h, v5.h[1]
287 smlal v26.4s, v8.4h, v6.h[3] //// y1 * cos3(part of b1)
292 smlsl v26.4s, v9.4h, v7.h[3] //// y1 * cos3 - y3 * sin1(part of b1)
331 smlsl v26.4s, v14.4h, v6.h[1]
337 smlsl v26.4s, v15.4h, v4.h[3]
363 smlsl v26.4s, v8.4h, v3.h[1] //// y1 * cos3(part of b1)
368 smlsl v26.4s, v9.4h, v1.h[3] //// y1 * cos3 - y3 * sin1(part of b1)
[all …]
ihevc_sao_edge_offset_class2_chroma.s
564 ADD v26.16b, v0.16b , v17.16b //II edge_idx = vaddq_s8(const_2, sign_up)
569 ADD v26.16b, v26.16b , v24.16b //II edge_idx = vaddq_s8(edge_idx, sign_down)
576 TBL v26.16b, {v22.16b},v26.16b //II vtbl1_s8(edge_idx_tbl, vget_low_s8(edge_idx))
587 AND v26.16b, v26.16b , v1.16b //II edge_idx = vandq_s8(edge_idx, au1_mask)
590 mov v27.d[0],v26.d[1]
591 UZP1 v31.8b, v26.8b, v27.8b
592 UZP2 v27.8b, v26.8b, v27.8b //II
593 mov v26.8b,v31.8b
600 TBL v24.8b, {v6.16b},v26.8b //II
628 …Uxtl2 v26.8h, v5.16b //II pi2_tmp_cur_row.val[1] = vreinterpretq_s16_u16(vmovl_…
[all …]
ihevc_inter_pred_chroma_vert_w16inp_w16out.s
212 smull v26.4s, v2.4h, v16.4h //vmull_s16(src_tmp2, coeff_0)
213 smlal v26.4s, v3.4h, v17.4h
214 smlal v26.4s, v4.4h, v18.4h
217 smlal v26.4s, v5.4h, v19.4h
232 sqshrn v26.4h, v26.4s,#6 //right shift
252 st1 {v26.2s},[x9],x3 //stores the loaded value
261 smull v26.4s, v2.4h, v16.4h //vmull_s16(src_tmp2, coeff_0)
263 smlal v26.4s, v3.4h, v17.4h
266 smlal v26.4s, v4.4h, v18.4h
268 smlal v26.4s, v5.4h, v19.4h
[all …]
ihevc_sao_edge_offset_class3.s
412 cmhi v26.16b, v18.16b , v5.16b //II vcltq_u8(pu1_cur_row, pu1_next_row_tmp)
416 …SUB v24.16b, v26.16b , v24.16b //II sign_down = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp…
426 ADD v26.16b, v0.16b , v17.16b //II edge_idx = vaddq_s8(const_2, sign_up)
431 ADD v26.16b, v26.16b , v24.16b //II edge_idx = vaddq_s8(edge_idx, sign_down)
434 TBL v26.16b, {v6.16b},v26.16b //II vtbl1_s8(edge_idx_tbl, vget_low_s8(edge_idx))
442 AND v26.16b, v26.16b , v1.16b //II edge_idx = vandq_s8(edge_idx, au1_mask)
445 … TBL v24.16b, {v7.16b},v26.16b //II offset = vtbl1_s8(offset_tbl, vget_low_s8(edge_idx))
460 …Uxtl2 v26.8h, v5.16b //II pi2_tmp_cur_row.val[1] = vreinterpretq_s16_u16(vmovl_…
463 …SADDW2 v26.8h, v26.8h , v24.16b //II pi2_tmp_cur_row.val[1] = vaddw_s8(pi2_tmp_cur_row.val…
465 …SMAX v26.8h, v26.8h , v2.8h //II pi2_tmp_cur_row.val[1] = vmaxq_s16(pi2_tmp_cur_row.va…
[all …]
ihevc_inter_pred_chroma_vert.s
257 umull v26.8h, v7.8b, v1.8b
259 umlsl v26.8h, v6.8b, v0.8b
261 umlal v26.8h, v16.8b, v2.8b
263 umlsl v26.8h, v17.8b, v3.8b
279 sqrshrun v26.8b, v26.8h,#6
299 st1 {v26.8b},[x7],x3 //stores the loaded value
318 umull v26.8h, v7.8b, v1.8b
325 umlsl v26.8h, v6.8b, v0.8b
328 umlal v26.8h, v16.8b, v2.8b
331 umlsl v26.8h, v17.8b, v3.8b
[all …]
/external/llvm/test/CodeGen/SystemZ/
vec-max-02.ll
8 ; CHECK: vmxh %v24, {{%v24, %v26|%v26, %v24}}
18 ; CHECK: vmxh %v24, {{%v24, %v26|%v26, %v24}}
28 ; CHECK: vmxh %v24, {{%v24, %v26|%v26, %v24}}
38 ; CHECK: vmxh %v24, {{%v24, %v26|%v26, %v24}}
48 ; CHECK: vmxlh %v24, {{%v24, %v26|%v26, %v24}}
58 ; CHECK: vmxlh %v24, {{%v24, %v26|%v26, %v24}}
68 ; CHECK: vmxlh %v24, {{%v24, %v26|%v26, %v24}}
78 ; CHECK: vmxlh %v24, {{%v24, %v26|%v26, %v24}}
vec-min-03.ll
8 ; CHECK: vmnf %v24, {{%v24, %v26|%v26, %v24}}
18 ; CHECK: vmnf %v24, {{%v24, %v26|%v26, %v24}}
28 ; CHECK: vmnf %v24, {{%v24, %v26|%v26, %v24}}
38 ; CHECK: vmnf %v24, {{%v24, %v26|%v26, %v24}}
48 ; CHECK: vmnlf %v24, {{%v24, %v26|%v26, %v24}}
58 ; CHECK: vmnlf %v24, {{%v24, %v26|%v26, %v24}}
68 ; CHECK: vmnlf %v24, {{%v24, %v26|%v26, %v24}}
78 ; CHECK: vmnlf %v24, {{%v24, %v26|%v26, %v24}}
vec-min-04.ll
8 ; CHECK: vmng %v24, {{%v24, %v26|%v26, %v24}}
18 ; CHECK: vmng %v24, {{%v24, %v26|%v26, %v24}}
28 ; CHECK: vmng %v24, {{%v24, %v26|%v26, %v24}}
38 ; CHECK: vmng %v24, {{%v24, %v26|%v26, %v24}}
48 ; CHECK: vmnlg %v24, {{%v24, %v26|%v26, %v24}}
58 ; CHECK: vmnlg %v24, {{%v24, %v26|%v26, %v24}}
68 ; CHECK: vmnlg %v24, {{%v24, %v26|%v26, %v24}}
78 ; CHECK: vmnlg %v24, {{%v24, %v26|%v26, %v24}}
vec-max-03.ll
8 ; CHECK: vmxf %v24, {{%v24, %v26|%v26, %v24}}
18 ; CHECK: vmxf %v24, {{%v24, %v26|%v26, %v24}}
28 ; CHECK: vmxf %v24, {{%v24, %v26|%v26, %v24}}
38 ; CHECK: vmxf %v24, {{%v24, %v26|%v26, %v24}}
48 ; CHECK: vmxlf %v24, {{%v24, %v26|%v26, %v24}}
58 ; CHECK: vmxlf %v24, {{%v24, %v26|%v26, %v24}}
68 ; CHECK: vmxlf %v24, {{%v24, %v26|%v26, %v24}}
78 ; CHECK: vmxlf %v24, {{%v24, %v26|%v26, %v24}}
vec-min-01.ll
8 ; CHECK: vmnb %v24, {{%v24, %v26|%v26, %v24}}
18 ; CHECK: vmnb %v24, {{%v24, %v26|%v26, %v24}}
28 ; CHECK: vmnb %v24, {{%v24, %v26|%v26, %v24}}
38 ; CHECK: vmnb %v24, {{%v24, %v26|%v26, %v24}}
48 ; CHECK: vmnlb %v24, {{%v24, %v26|%v26, %v24}}
58 ; CHECK: vmnlb %v24, {{%v24, %v26|%v26, %v24}}
68 ; CHECK: vmnlb %v24, {{%v24, %v26|%v26, %v24}}
78 ; CHECK: vmnlb %v24, {{%v24, %v26|%v26, %v24}}
vec-min-02.ll
8 ; CHECK: vmnh %v24, {{%v24, %v26|%v26, %v24}}
18 ; CHECK: vmnh %v24, {{%v24, %v26|%v26, %v24}}
28 ; CHECK: vmnh %v24, {{%v24, %v26|%v26, %v24}}
38 ; CHECK: vmnh %v24, {{%v24, %v26|%v26, %v24}}
48 ; CHECK: vmnlh %v24, {{%v24, %v26|%v26, %v24}}
58 ; CHECK: vmnlh %v24, {{%v24, %v26|%v26, %v24}}
68 ; CHECK: vmnlh %v24, {{%v24, %v26|%v26, %v24}}
78 ; CHECK: vmnlh %v24, {{%v24, %v26|%v26, %v24}}
vec-max-01.ll
8 ; CHECK: vmxb %v24, {{%v24, %v26|%v26, %v24}}
18 ; CHECK: vmxb %v24, {{%v24, %v26|%v26, %v24}}
28 ; CHECK: vmxb %v24, {{%v24, %v26|%v26, %v24}}
38 ; CHECK: vmxb %v24, {{%v24, %v26|%v26, %v24}}
48 ; CHECK: vmxlb %v24, {{%v24, %v26|%v26, %v24}}
58 ; CHECK: vmxlb %v24, {{%v24, %v26|%v26, %v24}}
68 ; CHECK: vmxlb %v24, {{%v24, %v26|%v26, %v24}}
78 ; CHECK: vmxlb %v24, {{%v24, %v26|%v26, %v24}}
vec-max-04.ll
8 ; CHECK: vmxg %v24, {{%v24, %v26|%v26, %v24}}
18 ; CHECK: vmxg %v24, {{%v24, %v26|%v26, %v24}}
28 ; CHECK: vmxg %v24, {{%v24, %v26|%v26, %v24}}
38 ; CHECK: vmxg %v24, {{%v24, %v26|%v26, %v24}}
48 ; CHECK: vmxlg %v24, {{%v24, %v26|%v26, %v24}}
58 ; CHECK: vmxlg %v24, {{%v24, %v26|%v26, %v24}}
68 ; CHECK: vmxlg %v24, {{%v24, %v26|%v26, %v24}}
78 ; CHECK: vmxlg %v24, {{%v24, %v26|%v26, %v24}}
vec-move-01.ll
8 ; CHECK: vlr %v24, %v26
16 ; CHECK: vlr %v24, %v26
24 ; CHECK: vlr %v24, %v26
32 ; CHECK: vlr %v24, %v26
40 ; CHECK: vlr %v24, %v26
48 ; CHECK: vlr %v24, %v26
56 ; CHECK: vlr %v24, %v26
64 ; CHECK: vlr %v24, %v26
72 ; CHECK: vlr %v24, %v26
80 ; CHECK: vlr %v24, %v26
[all …]
vec-cmp-05.ll
10 ; CHECK-DAG: vmrhf [[HIGH1E:%v[0-9]+]], %v26, %v26
11 ; CHECK-DAG: vmrlf [[LOW1E:%v[0-9]+]], %v26, %v26
30 ; CHECK-DAG: vmrhf [[HIGH1E:%v[0-9]+]], %v26, %v26
31 ; CHECK-DAG: vmrlf [[LOW1E:%v[0-9]+]], %v26, %v26
54 ; CHECK-DAG: vmrhf [[HIGH1E:%v[0-9]+]], %v26, %v26
55 ; CHECK-DAG: vmrlf [[LOW1E:%v[0-9]+]], %v26, %v26
74 ; CHECK-DAG: vmrhf [[HIGH1E:%v[0-9]+]], %v26, %v26
75 ; CHECK-DAG: vmrlf [[LOW1E:%v[0-9]+]], %v26, %v26
94 ; CHECK-DAG: vmrhf [[HIGH1E:%v[0-9]+]], %v26, %v26
95 ; CHECK-DAG: vmrlf [[LOW1E:%v[0-9]+]], %v26, %v26
[all …]
vec-cmp-06.ll
8 ; CHECK: vfcedb %v24, %v26, %v28
18 ; CHECK-DAG: vfchdb [[REG1:%v[0-9]+]], %v28, %v26
19 ; CHECK-DAG: vfchdb [[REG2:%v[0-9]+]], %v26, %v28
30 ; CHECK: vfchdb %v24, %v26, %v28
40 ; CHECK: vfchedb %v24, %v26, %v28
50 ; CHECK: vfchedb %v24, %v28, %v26
60 ; CHECK: vfchdb %v24, %v28, %v26
70 ; CHECK-DAG: vfchdb [[REG1:%v[0-9]+]], %v28, %v26
71 ; CHECK-DAG: vfchdb [[REG2:%v[0-9]+]], %v26, %v28
82 ; CHECK: vfcedb [[REG:%v[0-9]+]], %v26, %v28
[all …]
/external/libavc/encoder/armv8/
ih264e_half_pel_av8.s
117 ext v26.8b, v7.8b , v7.8b , #5
121 uaddl v18.8h, v26.8b, v7.8b //// a0 + a5 (column3,row1)
130 ext v26.8b, v7.8b , v7.8b , #2
134 umlal v18.8h, v26.8b, v1.8b //// a0 + a5 + 20a2 (column3,row1)
143 ext v26.8b, v7.8b , v7.8b , #3
147 umlal v18.8h, v26.8b, v1.8b //// a0 + a5 + 20a2 + 20a3 (column3,row1)
156 ext v26.8b, v7.8b , v7.8b , #1
160 umlsl v18.8h, v26.8b, v0.8b //// a0 + a5 + 20a2 + 20a3 - 5a1 (column3,row1)
169 ext v26.8b, v7.8b , v7.8b , #4
172 umlsl v18.8h, v26.8b, v0.8b //// a0 + a5 + 20a2 + 20a3 - 5a1 - 5a4 (column3,row1)
[all …]
