Lines Matching refs:cr3
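Every hit below is a local variable inside the SIMD butterfly kernels of the PFFFT FFT library: passfN_ps() are the radix-N complex passes, radfN_ps()/radbN_ps() the radix-N real forward/backward passes, and FUNC_REAL_PREPROCESS part of the real-transform pre/post-processing. The leading number on each hit is the line number in the source file.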
114 v4sf tr2, ti2, cr2, ci2, cr3, ci3, dr2, di2, dr3, di3; in passf3_ps() local
126 cr3 = SVMUL(taui, VSUB(cc[i+ido], cc[i+2*ido])); in passf3_ps()
130 di2 = VADD(ci2, cr3); in passf3_ps()
131 di3 = VSUB(ci2, cr3); in passf3_ps()
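In passf3_ps(), cr3/ci3 hold taui*(x1 - x2), the sine-weighted half of the radix-3 butterfly, and di2/di3 splice it into the cosine half cr2/ci2. A minimal scalar sketch of the same arithmetic, assuming the usual FFTPACK constants taur = -0.5 and taui = ±0.866025403784439 (the sign selecting the transform direction), with the v4sf macros replaced by plain float operations and a made-up helper name:

    /* Hypothetical scalar helper, not PFFFT's code: one radix-3 butterfly
       y0..y2 = DFT3(x0..x2), matching the cr3/di2/di3 lines above. */
    static void butterfly3(float x0r, float x0i, float x1r, float x1i,
                           float x2r, float x2i, float taui, float y[6])
    {
        const float taur = -0.5f;                /* cos(2*pi/3) */
        float tr2 = x1r + x2r, ti2 = x1i + x2i;  /* x1 + x2 */
        float cr2 = x0r + taur*tr2;              /* cosine half */
        float ci2 = x0i + taur*ti2;
        float cr3 = taui*(x1r - x2r);            /* sine half: taui*(x1 - x2) */
        float ci3 = taui*(x1i - x2i);
        y[0] = x0r + tr2;  y[1] = x0i + ti2;     /* y0 = x0 + x1 + x2 */
        y[2] = cr2 - ci3;  y[3] = ci2 + cr3;     /* y1 = dr2 + i*di2 */
        y[4] = cr2 + ci3;  y[5] = ci2 - cr3;     /* y2 = dr3 + i*di3 */
    }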
148 v4sf ci2, ci3, ci4, cr2, cr3, cr4, ti1, ti2, ti3, ti4, tr1, tr2, tr3, tr4; in passf4_ps() local
184 cr3 = VSUB(tr2, tr3); in passf4_ps()
198 VCPLXMUL(cr3, ci3, LD_PS1(wr2), LD_PS1(wi2)); in passf4_ps()
200 ch[i + 2*l1ido] = cr3; in passf4_ps()
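In passf4_ps(), cr3 = tr2 - tr3 is the real part of the k=2 radix-4 output before twiddling; VCPLXMUL at line 198 then multiplies (cr3, ci3) in place by the twiddle factor (wr2, wi2), one complex product per SIMD lane. Assuming the macro implements an ordinary complex multiply, its per-lane effect is:

    /* Per-lane meaning of VCPLXMUL(ar, ai, br, bi): (ar + i*ai) *= (br + i*bi).
       Scalar sketch only; the real macro operates on whole v4sf vectors. */
    static void cplx_mul(float *ar, float *ai, float br, float bi)
    {
        float tr = *ar;
        *ar = tr*br - (*ai)*bi;   /* real part */
        *ai = tr*bi + (*ai)*br;   /* imaginary part */
    }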
224 v4sf ci2, ci3, ci4, ci5, di3, di4, di5, di2, cr2, cr3, cr5, cr4, ti2, ti3, in passf5_ps() local
247 cr3 = VADD(cc_ref(i-1, 1), VADD(SVMUL(tr12, tr2),SVMUL(tr11, tr3))); in passf5_ps()
253 dr3 = VSUB(cr3, ci4); in passf5_ps()
254 dr4 = VADD(cr3, ci4); in passf5_ps()
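The radix-5 pass pairs two cosine constants, tr11 = cos(2*pi/5) and tr12 = cos(4*pi/5). cr3 at line 247 is the cosine half of the y2/y3 output pair, and lines 253/254 fold in the sine half ci4. A scalar sketch of just that slice, assuming tr2 and tr3 are Re(x1 + x4) and Re(x2 + x3) as in the FFTPACK-derived kernels:

    /* Hypothetical scalar helper mirroring lines 247/253/254. */
    static void radix5_re(float x0r, float tr2, float tr3, float ci4,
                          float *dr3, float *dr4)
    {
        const float tr11 =  0.309016994374947f;  /* cos(2*pi/5) */
        const float tr12 = -0.809016994374947f;  /* cos(4*pi/5) */
        float cr3 = x0r + tr12*tr2 + tr11*tr3;   /* cosine half of y2/y3 */
        *dr3 = cr3 - ci4;                        /* Re(y2) */
        *dr4 = cr3 + ci4;                        /* Re(y3) */
    }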
392 v4sf ci2, ci3, di2, di3, cr2, cr3, dr2, dr3, ti2, tr2; in radb3_ps() local
411 cr3 = SVMUL(taui, VSUB(cc[i - 1 + (3*k + 2)*ido], cc[ic - 1 + (3*k + 1)*ido])); in radb3_ps()
415 di2 = VADD(ci2, cr3); in radb3_ps()
416 di3 = VSUB(ci2, cr3); in radb3_ps()
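radb3_ps() runs over half-complex (real-FFT) data, so the subtraction at line 411 reads its second operand through a mirrored index ic instead of a plain offset: the conjugate partner of bin i lives at the far end of the previous section. A scalar sketch of that indexing, assuming ic = ido - i as in the FFTPACK-derived kernels:

    /* Hypothetical scalar rewrite of line 411: section 3, bin i, minus the
       conjugate-mirrored bin of section 2, scaled by taui. */
    static float radb3_cr3(const float *cc, int ido, int i, int k, float taui)
    {
        int ic = ido - i;                       /* mirrored column index */
        float a = cc[i  - 1 + (3*k + 2)*ido];   /* section 3, bin i      */
        float b = cc[ic - 1 + (3*k + 1)*ido];   /* section 2, mirror     */
        return taui * (a - b);                  /* = cr3                 */
    }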
455 v4sf wr, wi, cr2, ci2, cr3, ci3, cr4, ci4; in radf4_ps() local
464 cr3 = pc[2*l1ido+0]; in radf4_ps()
468 VCPLXMULCONJ(cr3, ci3, wr, wi); in radf4_ps()
480 tr2 = VADD(pc[0],cr3); in radf4_ps()
481 tr3 = VSUB(pc[0],cr3); in radf4_ps()
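The forward real radix-4 pass twiddles with the conjugated factor: VCPLXMULCONJ at line 468 multiplies (cr3, ci3) by conj(wr + i*wi), after which lines 480/481 take the sum and difference with the untwiddled lane pc[0]. Assuming the macro is the conjugate counterpart of VCPLXMUL, per lane it computes:

    /* Per-lane meaning of VCPLXMULCONJ(ar, ai, br, bi):
       (ar + i*ai) *= (br - i*bi). Scalar sketch only. */
    static void cplx_mul_conj(float *ar, float *ai, float br, float bi)
    {
        float tr = *ar;
        *ar = tr*br + (*ai)*bi;   /* real part */
        *ai = (*ai)*br - tr*bi;   /* imaginary part */
    }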
517 v4sf ci2, ci3, ci4, cr2, cr3, cr4, ti1, ti2, ti3, ti4, tr1, tr2, tr3, tr4; in radb4_ps() local
549 cr3 = VSUB(tr2, tr3); in radb4_ps()
566 VCPLXMUL(cr3, ci3, LD_PS1(wa2[i-2]), LD_PS1(wa2[i-1])); in radb4_ps()
567 ph[0] = cr3; in radb4_ps()
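radb4_ps() repeats the cr3 = tr2 - tr3 pattern of the complex pass, but its twiddles are plain float tables (wa2[]), so each factor is broadcast into all SIMD lanes with LD_PS1 before the complex multiply at line 566. A sketch of that broadcast-then-multiply step written directly against SSE intrinsics (PFFFT hides the same operation behind portable macros so it also builds for AltiVec and NEON):

    /* SSE-flavoured sketch of line 566; names are illustrative. */
    #include <xmmintrin.h>
    static void twiddle4(__m128 *cr3, __m128 *ci3, float wr, float wi)
    {
        __m128 br = _mm_set1_ps(wr);   /* LD_PS1(wa2[i-2]) */
        __m128 bi = _mm_set1_ps(wi);   /* LD_PS1(wa2[i-1]) */
        __m128 tr = *cr3;
        *cr3 = _mm_sub_ps(_mm_mul_ps(tr, br), _mm_mul_ps(*ci3, bi));
        *ci3 = _mm_add_ps(_mm_mul_ps(*ci3, br), _mm_mul_ps(tr, bi));
    }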
604 v4sf ci2, di2, ci4, ci5, di3, di4, di5, ci3, cr2, cr3, dr2, dr3, dr4, dr5, in radf5_ps() local
622 cr3 = VADD(cc_ref(1, k, 4), cc_ref(1, k, 3)); in radf5_ps()
624 ch_ref(1, 1, k) = VADD(cc_ref(1, k, 1), VADD(cr2, cr3)); in radf5_ps()
625 ch_ref(ido, 2, k) = VADD(cc_ref(1, k, 1), VADD(SVMUL(tr11, cr2), SVMUL(tr12, cr3))); in radf5_ps()
627 ch_ref(ido, 4, k) = VADD(cc_ref(1, k, 1), VADD(SVMUL(tr12, cr2), SVMUL(tr11, cr3))); in radf5_ps()
650 cr3 = VADD(dr3, dr4); in radf5_ps()
654 ch_ref(i - 1, 1, k) = VADD(cc_ref(i - 1, k, 1), VADD(cr2, cr3)); in radf5_ps()
656 tr2 = VADD(cc_ref(i - 1, k, 1), VADD(SVMUL(tr11, cr2), SVMUL(tr12, cr3))); in radf5_ps()
658 tr3 = VADD(cc_ref(i - 1, k, 1), VADD(SVMUL(tr12, cr2), SVMUL(tr11, cr3))); in radf5_ps()
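radf5_ps() leans on SVMUL, scalar-times-vector: tr11/tr12 are plain float constants, so each use broadcasts the constant and multiplies it lane-wise into a v4sf (in PFFFT-style code SVMUL(f, v) is typically spelled VMUL(LD_PS1(f), v)). Per lane, the special i == 1 column at line 625 reduces to:

    /* Hypothetical scalar rewrite of line 625: the second output column of
       the forward radix-5 pass, x0 + tr11*cr2 + tr12*cr3. */
    static float radf5_col2(float x0, float cr2, float cr3)
    {
        const float tr11 =  0.309016994374947f;  /* cos(2*pi/5) */
        const float tr12 = -0.809016994374947f;  /* cos(4*pi/5) */
        return x0 + tr11*cr2 + tr12*cr3;         /* ch_ref(ido, 2, k) */
    }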
690 v4sf ci2, ci3, ci4, ci5, di3, di4, di5, di2, cr2, cr3, cr5, cr4, ti2, ti3, in radb5_ps() local
711 cr3 = VADD(cc_ref(1, 1, k), VADD(SVMUL(tr12, tr2), SVMUL(tr11, tr3))); in radb5_ps()
715 ch_ref(1, k, 3) = VSUB(cr3, ci4); in radb5_ps()
716 ch_ref(1, k, 4) = VADD(cr3, ci4); in radb5_ps()
738 cr3 = VADD(cc_ref(i-1, 1, k), VADD(SVMUL(tr12, tr2), SVMUL(tr11, tr3))); in radb5_ps()
744 dr3 = VSUB(cr3, ci4); in radb5_ps()
745 dr4 = VADD(cr3, ci4); in radb5_ps()
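The backward radix-5 pass mirrors passf5_ps() almost line for line (compare 247/253/254 with 738/744/745). The dr3/dr4 pair is the conjugate recombination shared by the radix-3 and radix-5 kernels: with cosine part c and sine part s (both complex), the two outputs are c + i*s and c - i*s. A sketch using C99 complex arithmetic:

    #include <complex.h>
    /* Conjugate-pair recombination: Re(c + i*s) = cr3 - ci4 (dr3) and
       Re(c - i*s) = cr3 + ci4 (dr4), matching lines 744/745. */
    static void pair(float complex c, float complex s,
                     float complex *y_lo, float complex *y_hi)
    {
        *y_lo = c + I*s;
        *y_hi = c - I*s;
    }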
1379 float cr0, ci0, cr1, ci1, cr2, ci2, cr3, ci3; in FUNC_REAL_PREPROCESS() local
1408 cr3=(Xr.f[0]-Xi.f[0]) + 2*Xi.f[2]; uout[0].f[3] = cr3; in FUNC_REAL_PREPROCESS()
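FUNC_REAL_PREPROCESS stitches the four interleaved sub-FFTs of PFFFT's SIMD layout back into the spectrum of the real input; line 1408 combines lane 0 and lane 2 of the packed Xr/Xi vectors into the last lane of the first output vector. The identity underneath this recombination is the standard split that recovers a real signal's spectrum from the FFT of its even/odd complex packing; a background sketch of that identity, not PFFFT's actual code:

    #include <complex.h>
    /* Spectrum of a real 2M-point signal from the M-point complex FFT Z of
       its packing z[n] = x[2n] + I*x[2n+1], for bins k = 0..M. */
    static float complex real_bin(const float complex *Z, int M, int k)
    {
        float complex a = Z[k % M];
        float complex b = conjf(Z[(M - k) % M]);
        float ang = -3.14159265358979f * (float)k / (float)M;
        float complex e = cexpf(I * ang);        /* e^{-2*pi*i*k/(2M)} */
        return 0.5f*(a + b) - 0.5f*I*e*(a - b);
    }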