
Lines Matching refs:reg0

112 int32x4_t reg0[4], reg1[4]; in ihevce_wt_avg_2d_16x1_neon() local
129 reg0[0] = vmovl_s16(vget_low_s16(a2)); in ihevce_wt_avg_2d_16x1_neon()
130 reg0[1] = vmovl_s16(vget_high_s16(a2)); in ihevce_wt_avg_2d_16x1_neon()
131 reg0[2] = vmovl_s16(vget_low_s16(a3)); in ihevce_wt_avg_2d_16x1_neon()
132 reg0[3] = vmovl_s16(vget_high_s16(a3)); in ihevce_wt_avg_2d_16x1_neon()
139 reg0[0] = vmulq_s32(reg0[0], a6); in ihevce_wt_avg_2d_16x1_neon()
140 reg0[1] = vmulq_s32(reg0[1], a6); in ihevce_wt_avg_2d_16x1_neon()
141 reg0[2] = vmulq_s32(reg0[2], a6); in ihevce_wt_avg_2d_16x1_neon()
142 reg0[3] = vmulq_s32(reg0[3], a6); in ihevce_wt_avg_2d_16x1_neon()
149 reg0[0] = vaddq_s32(reg0[0], reg1[0]); in ihevce_wt_avg_2d_16x1_neon()
150 reg0[1] = vaddq_s32(reg0[1], reg1[1]); in ihevce_wt_avg_2d_16x1_neon()
151 reg0[2] = vaddq_s32(reg0[2], reg1[2]); in ihevce_wt_avg_2d_16x1_neon()
152 reg0[3] = vaddq_s32(reg0[3], reg1[3]); in ihevce_wt_avg_2d_16x1_neon()
154 reg0[0] = vshlq_s32(reg0[0], a9); in ihevce_wt_avg_2d_16x1_neon()
155 reg0[1] = vshlq_s32(reg0[1], a9); in ihevce_wt_avg_2d_16x1_neon()
156 reg0[2] = vshlq_s32(reg0[2], a9); in ihevce_wt_avg_2d_16x1_neon()
157 reg0[3] = vshlq_s32(reg0[3], a9); // (p0*w0 + p1*w1) >> shift in ihevce_wt_avg_2d_16x1_neon()
159 a2 = vcombine_s16(vmovn_s32(reg0[0]), vmovn_s32(reg0[1])); in ihevce_wt_avg_2d_16x1_neon()
160 a3 = vcombine_s16(vmovn_s32(reg0[2]), vmovn_s32(reg0[3])); in ihevce_wt_avg_2d_16x1_neon()
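
The matches above trace the full 16x1 weighted-average pipeline: widen both 16-bit predictors to 32 bits, multiply by the per-predictor weights, add, shift, and narrow back, per the inline comment (p0*w0 + p1*w1) >> shift. Below is a minimal self-contained sketch of that pipeline. The signature and buffer names (pred0, pred1, dst, w0, w1, shift) are assumptions, and the reg1 (p1*w1) side is reconstructed symmetrically, since the search only returns lines touching reg0; rounding/offset handling does not appear in the matches and is omitted here.

    #include <arm_neon.h>

    /* Sketch: weighted average of one row of 16 signed 16-bit pixels.
     * All parameter names are hypothetical, not the real signature. */
    static void wt_avg_row16_sketch(const int16_t *pred0, const int16_t *pred1,
                                    int16_t *dst, int32_t w0, int32_t w1,
                                    int32_t shift)
    {
        int16x8_t a2 = vld1q_s16(pred0);      /* p0, pixels 0..7  */
        int16x8_t a3 = vld1q_s16(pred0 + 8);  /* p0, pixels 8..15 */
        int16x8_t a4 = vld1q_s16(pred1);      /* p1, pixels 0..7  */
        int16x8_t a5 = vld1q_s16(pred1 + 8);  /* p1, pixels 8..15 */

        int32x4_t a6 = vdupq_n_s32(w0);
        int32x4_t a7 = vdupq_n_s32(w1);
        /* vshlq_s32 shifts left; a negative count yields the >> in the formula */
        int32x4_t a9 = vdupq_n_s32(-shift);

        int32x4_t reg0[4], reg1[4];

        /* widen p0 and p1 from s16 to s32, half a vector at a time */
        reg0[0] = vmovl_s16(vget_low_s16(a2));
        reg0[1] = vmovl_s16(vget_high_s16(a2));
        reg0[2] = vmovl_s16(vget_low_s16(a3));
        reg0[3] = vmovl_s16(vget_high_s16(a3));
        reg1[0] = vmovl_s16(vget_low_s16(a4));
        reg1[1] = vmovl_s16(vget_high_s16(a4));
        reg1[2] = vmovl_s16(vget_low_s16(a5));
        reg1[3] = vmovl_s16(vget_high_s16(a5));

        /* the original is fully unrolled; a loop keeps the sketch short */
        for (int i = 0; i < 4; i++)
        {
            reg0[i] = vmulq_s32(reg0[i], a6);       /* p0 * w0            */
            reg1[i] = vmulq_s32(reg1[i], a7);       /* p1 * w1            */
            reg0[i] = vaddq_s32(reg0[i], reg1[i]);  /* p0*w0 + p1*w1      */
            reg0[i] = vshlq_s32(reg0[i], a9);       /* ... >> shift       */
        }

        /* narrow back to s16 and store */
        vst1q_s16(dst,     vcombine_s16(vmovn_s32(reg0[0]), vmovn_s32(reg0[1])));
        vst1q_s16(dst + 8, vcombine_s16(vmovn_s32(reg0[2]), vmovn_s32(reg0[3])));
    }
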
232 int32x4_t reg0[4], reg1[4]; in ihevce_wt_avg_2d_4xn_neon() local
255 reg0[0] = vmovl_s16(vreinterpret_s16_u16(vget_low_u16(a0))); in ihevce_wt_avg_2d_4xn_neon()
256 reg0[1] = vmovl_s16(vreinterpret_s16_u16(vget_high_u16(a0))); in ihevce_wt_avg_2d_4xn_neon()
257 reg0[2] = vmovl_s16(vreinterpret_s16_u16(vget_low_u16(a1))); in ihevce_wt_avg_2d_4xn_neon()
258 reg0[3] = vmovl_s16(vreinterpret_s16_u16(vget_high_u16(a1))); in ihevce_wt_avg_2d_4xn_neon()
265 reg0[0] = vmulq_s32(reg0[0], a4); in ihevce_wt_avg_2d_4xn_neon()
266 reg0[1] = vmulq_s32(reg0[1], a4); in ihevce_wt_avg_2d_4xn_neon()
267 reg0[2] = vmulq_s32(reg0[2], a4); in ihevce_wt_avg_2d_4xn_neon()
268 reg0[3] = vmulq_s32(reg0[3], a4); in ihevce_wt_avg_2d_4xn_neon()
275 reg0[0] = vaddq_s32(reg0[0], reg1[0]); in ihevce_wt_avg_2d_4xn_neon()
276 reg0[1] = vaddq_s32(reg0[1], reg1[1]); in ihevce_wt_avg_2d_4xn_neon()
277 reg0[2] = vaddq_s32(reg0[2], reg1[2]); in ihevce_wt_avg_2d_4xn_neon()
278 reg0[3] = vaddq_s32(reg0[3], reg1[3]); in ihevce_wt_avg_2d_4xn_neon()
280 reg0[0] = vshlq_s32(reg0[0], a7); in ihevce_wt_avg_2d_4xn_neon()
281 reg0[1] = vshlq_s32(reg0[1], a7); in ihevce_wt_avg_2d_4xn_neon()
282 reg0[2] = vshlq_s32(reg0[2], a7); in ihevce_wt_avg_2d_4xn_neon()
283 reg0[3] = vshlq_s32(reg0[3], a7); in ihevce_wt_avg_2d_4xn_neon()
285 a8 = vcombine_s16(vmovn_s32(reg0[0]), vmovn_s32(reg0[1])); in ihevce_wt_avg_2d_4xn_neon()
286 a9 = vcombine_s16(vmovn_s32(reg0[2]), vmovn_s32(reg0[3])); in ihevce_wt_avg_2d_4xn_neon()
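
Compared with the 16x1 kernel, the 4xn variant differs only at the widening step: its source vectors (a0, a1) are unsigned 16-bit, so each half is reinterpreted to s16 before vmovl_s16. The sketch below shows just that step, assuming a0 and a1 are uint16x8_t values already loaded (e.g. via vld1q_u16); the multiply/add/shift/narrow stages then proceed exactly as in the 16x1 sketch above. The reinterpret is free at register level and is safe provided the pixel values fit in the positive s16 range, which appears to be the working assumption here.

    #include <arm_neon.h>

    /* Sketch: u16 -> s16 lane reinterpret, then s16 -> s32 widen,
     * one half-vector at a time. Function name is hypothetical. */
    static void widen_u16_pair_sketch(uint16x8_t a0, uint16x8_t a1,
                                      int32x4_t reg0[4])
    {
        reg0[0] = vmovl_s16(vreinterpret_s16_u16(vget_low_u16(a0)));
        reg0[1] = vmovl_s16(vreinterpret_s16_u16(vget_high_u16(a0)));
        reg0[2] = vmovl_s16(vreinterpret_s16_u16(vget_low_u16(a1)));
        reg0[3] = vmovl_s16(vreinterpret_s16_u16(vget_high_u16(a1)));
    }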