Lines Matching refs:vcprm
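
Note: every match below is a vec_perm() call in the PowerPC VSX FFT kernels, and vcprm(a,b,c,d) is the helper macro that builds the constant permute-control vector for them. Each argument names one 32-bit word of the result: 0..3 select words from the first source vector, s0..s3 ("s" for second) select words from the second. The sketch below illustrates that convention; it is written to mirror the usual definition in libavcodec/ppc/util_altivec.h, but treat the exact byte patterns as an illustrative assumption rather than a verbatim quote.

    /* Illustrative sketch of the vcprm() selector convention, assumed to
     * mirror libavcodec/ppc/util_altivec.h.  Requires <altivec.h> and a
     * POWER target built with -maltivec/-mvsx. */
    #include <altivec.h>

    /* Each WORD_x expands to the four byte indices of one 32-bit lane:
     * 0x00..0x0f address the first vec_perm operand, 0x10..0x1f the second. */
    #define WORD_0  0x00,0x01,0x02,0x03
    #define WORD_1  0x04,0x05,0x06,0x07
    #define WORD_2  0x08,0x09,0x0a,0x0b
    #define WORD_3  0x0c,0x0d,0x0e,0x0f
    #define WORD_s0 0x10,0x11,0x12,0x13
    #define WORD_s1 0x14,0x15,0x16,0x17
    #define WORD_s2 0x18,0x19,0x1a,0x1b
    #define WORD_s3 0x1c,0x1d,0x1e,0x1f
    #define vcprm(a,b,c,d) ((vector unsigned char){WORD_##a, WORD_##b, WORD_##c, WORD_##d})

    /* Example: with a = {a0,a1,a2,a3} and b = {b0,b1,b2,b3},
     * vec_perm(a, b, vcprm(0,1,s2,s1)) yields {a0, a1, b2, b1} --
     * the pattern seen on the fft4/fft8 lines below. */
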
98 x2 = vec_perm(x0, x1, vcprm(2,s2,3,s3)); in pass_vsx_interleave()
99 x3 = vec_perm(x0, x1, vcprm(s3,3,s2,2)); in pass_vsx_interleave()
101 y4 = vec_perm(y0, y1, vcprm(s1,1,s0,0)); in pass_vsx_interleave()
102 y5 = vec_perm(y0, y1, vcprm(s3,3,s2,2)); in pass_vsx_interleave()
103 y2 = vec_perm(y0, y1, vcprm(0,s0,1,s1)); in pass_vsx_interleave()
104 y3 = vec_perm(y0, y1, vcprm(2,s2,3,s3)); in pass_vsx_interleave()
117 x8 = vec_perm(x6, x7, vcprm(0,1,s2,s3)); in pass_vsx_interleave()
118 y10 = vec_perm(y8, y9, vcprm(0,1,s2,s3)); in pass_vsx_interleave()
119 y15 = vec_perm(y13, y14, vcprm(0,1,s2,s3)); in pass_vsx_interleave()
121 x9 = vec_perm(x0, x8, vcprm(0,1,s0,s2)); in pass_vsx_interleave()
122 x10 = vec_perm(x1, x8, vcprm(1,0,s3,s1)); in pass_vsx_interleave()
124 y16 = vec_perm(y10, y15, vcprm(0,2,s0,s2)); in pass_vsx_interleave()
125 y17 = vec_perm(y10, y15, vcprm(3,1,s3,s1)); in pass_vsx_interleave()
137 x15 = vec_perm(x13, x14, vcprm(0,s1,2,s3)); in pass_vsx_interleave()
138 x16 = vec_perm(x13, x14, vcprm(s0,1,s2,3)); in pass_vsx_interleave()
139 y22 = vec_perm(y20, y21, vcprm(0,s1,2,s3)); in pass_vsx_interleave()
140 y23 = vec_perm(y20, y21, vcprm(s0,1,s2,3)); in pass_vsx_interleave()
181 x4 = vec_perm(x0, x1, vcprm(s1,1,s0,0)); in pass_vsx_interleave()
182 x5 = vec_perm(x0, x1, vcprm(s3,3,s2,2)); in pass_vsx_interleave()
183 x2 = vec_perm(x0, x1, vcprm(0,s0,1,s1)); in pass_vsx_interleave()
184 x3 = vec_perm(x0, x1, vcprm(2,s2,3,s3)); in pass_vsx_interleave()
186 y2 = vec_perm(y0, y1, vcprm(0,s0,1,s1)); in pass_vsx_interleave()
187 y3 = vec_perm(y0, y1, vcprm(2,s2,3,s3)); in pass_vsx_interleave()
191 y4 = vec_perm(y0, y1, vcprm(s1,1,s0,0)); in pass_vsx_interleave()
192 y5 = vec_perm(y0, y1, vcprm(s3,3,s2,2)); in pass_vsx_interleave()
207 x10 = vec_perm(x8, x9, vcprm(0,1,s2,s3)); in pass_vsx_interleave()
208 x15 = vec_perm(x13, x14, vcprm(0,1,s2,s3)); in pass_vsx_interleave()
210 y10 = vec_perm(y8, y9, vcprm(0,1,s2,s3)); in pass_vsx_interleave()
211 y15 = vec_perm(y13, y14, vcprm(0,1,s2,s3)); in pass_vsx_interleave()
213 x16 = vec_perm(x10, x15, vcprm(0,2,s0,s2)); in pass_vsx_interleave()
214 x17 = vec_perm(x10, x15, vcprm(3,1,s3,s1)); in pass_vsx_interleave()
216 y16 = vec_perm(y10, y15, vcprm(0,2,s0,s2)); in pass_vsx_interleave()
217 y17 = vec_perm(y10, y15, vcprm(3,1,s3,s1)); in pass_vsx_interleave()
229 x22 = vec_perm(x20, x21, vcprm(0,s1,2,s3)); in pass_vsx_interleave()
230 x23 = vec_perm(x20, x21, vcprm(s0,1,s2,3)); in pass_vsx_interleave()
232 y22 = vec_perm(y20, y21, vcprm(0,s1,2,s3)); in pass_vsx_interleave()
233 y23 = vec_perm(y20, y21, vcprm(s0,1,s2,3)); in pass_vsx_interleave()
266 c = vec_perm(a, b, vcprm(0,1,s2,s1)); in fft4_vsx_interleave()
267 d = vec_perm(a, b, vcprm(2,3,s0,s3)); in fft4_vsx_interleave()
271 c = vec_perm(a, b, vcprm(0,1,s0,s1)); in fft4_vsx_interleave()
272 d = vec_perm(a, b, vcprm(2,3,s3,s2)); in fft4_vsx_interleave()
301 x0 = vec_perm(vz0, vz1, vcprm(0,1,s2,s1)); in fft8_vsx_interleave()
302 x1 = vec_perm(vz0, vz1, vcprm(2,3,s0,s3)); in fft8_vsx_interleave()
303 x2 = vec_perm(vz2, vz3, vcprm(2,1,s0,s1)); in fft8_vsx_interleave()
304 x3 = vec_perm(vz2, vz3, vcprm(0,3,s2,s3)); in fft8_vsx_interleave()
311 x8 = vec_perm(x4, x5, vcprm(0,1,s0,s1)); in fft8_vsx_interleave()
312 x9 = vec_perm(x4, x5, vcprm(2,3,s3,s2)); in fft8_vsx_interleave()
313 x10 = vec_perm(x6, x7, vcprm(2,1,s2,s1)); in fft8_vsx_interleave()
314 x11 = vec_perm(x6, x7, vcprm(0,3,s0,s3)); in fft8_vsx_interleave()
320 x16 = vec_perm(x12, x13, vcprm(0,s0,1,s1)); in fft8_vsx_interleave()
321 x17 = vec_perm(x14, x15, vcprm(0,s0,1,s1)); in fft8_vsx_interleave()
322 x18 = vec_perm(x16, x17, vcprm(s0,s3,s2,s1)); in fft8_vsx_interleave()
326 x21 = vec_perm(x12, x13, vcprm(2,s2,3,s3)); in fft8_vsx_interleave()
327 x22 = vec_perm(x14, x15, vcprm(2,3,s2,s3)); in fft8_vsx_interleave()
328 x23 = vec_perm(x14, x15, vcprm(3,2,s3,s2)); in fft8_vsx_interleave()
331 x26 = vec_mul( vec_perm(x24, x25, vcprm(2,s2,0,s0)), vc1); in fft8_vsx_interleave()
336 x29 = vec_perm(x19, x27, vcprm(0,2,s0,s2)); // z0.r z0.i z1.r z1.i in fft8_vsx_interleave()
337 x30 = vec_perm(x19, x27, vcprm(1,3,s1,s3)); // z2.r z2.i z7.r z3.i in fft8_vsx_interleave()
338 x31 = vec_perm(x20, x28, vcprm(0,2,s0,s2)); // z4.r z4.i z5.r z5.i in fft8_vsx_interleave()
339 x32 = vec_perm(x20, x28, vcprm(1,3,s1,s3)); // z6.r z6.i z3.r z7.i in fft8_vsx_interleave()
340 x33 = vec_perm(x30, x32, vcprm(0,1,s2,3)); // z2.r z2.i z3.r z3.i in fft8_vsx_interleave()
341 x34 = vec_perm(x30, x32, vcprm(s0,s1,2,s3)); // z6.r z6.i z7.r z7.i in fft8_vsx_interleave()
389 x0 = vec_perm(vz0, vz1, vcprm(0,1,s2,s1)); in fft16_vsx_interleave()
390 x1 = vec_perm(vz0, vz1, vcprm(2,3,s0,s3)); in fft16_vsx_interleave()
391 x2 = vec_perm(vz2, vz3, vcprm(0,1,s0,s1)); in fft16_vsx_interleave()
392 x3 = vec_perm(vz2, vz3, vcprm(2,3,s2,s3)); in fft16_vsx_interleave()
394 x4 = vec_perm(vz4, vz5, vcprm(0,1,s2,s1)); in fft16_vsx_interleave()
395 x5 = vec_perm(vz4, vz5, vcprm(2,3,s0,s3)); in fft16_vsx_interleave()
396 x6 = vec_perm(vz6, vz7, vcprm(0,1,s2,s1)); in fft16_vsx_interleave()
397 x7 = vec_perm(vz6, vz7, vcprm(2,3,s0,s3)); in fft16_vsx_interleave()
409 x16 = vec_perm(x8, x9, vcprm(0,1,s0,s1)); in fft16_vsx_interleave()
410 x17 = vec_perm(x8, x9, vcprm(2,3,s3,s2)); in fft16_vsx_interleave()
411 x18 = vec_perm(x10, x11, vcprm(2,1,s1,s2)); in fft16_vsx_interleave()
412 x19 = vec_perm(x10, x11, vcprm(0,3,s0,s3)); in fft16_vsx_interleave()
413 x20 = vec_perm(x12, x14, vcprm(0,1,s0, s1)); in fft16_vsx_interleave()
414 x21 = vec_perm(x12, x14, vcprm(2,3,s2,s3)); in fft16_vsx_interleave()
415 x22 = vec_perm(x13, x15, vcprm(0,1,s0,s1)); in fft16_vsx_interleave()
416 x23 = vec_perm(x13, x15, vcprm(3,2,s3,s2)); in fft16_vsx_interleave()
429 x34 = vec_perm(x32, x33, vcprm(0,1,s0,s1)); in fft16_vsx_interleave()
431 x35 = vec_perm(x28, x29, vcprm(2,1,s1,s2)); in fft16_vsx_interleave()
432 x36 = vec_perm(x28, x29, vcprm(0,3,s0,s3)); in fft16_vsx_interleave()
435 x39 = vec_perm(x37, x38, vcprm(0,1,s1,s0)); in fft16_vsx_interleave()
437 x40 = vec_perm(x27, x38, vcprm(3,2,s2,s3)); in fft16_vsx_interleave()
438 x41 = vec_perm(x26, x37, vcprm(2,3,s3,s2)); in fft16_vsx_interleave()
447 x48 = vec_perm(x30, x31, vcprm(2,1,s1,s2)); in fft16_vsx_interleave()
448 x49 = vec_perm(x30, x31, vcprm(0,3,s3,s0)); in fft16_vsx_interleave()
456 x56 = vec_perm(x24, x25, vcprm(2,3,s2,s3)); in fft16_vsx_interleave()
457 x57 = vec_perm(x44, x45, vcprm(0,1,s1,s0)); in fft16_vsx_interleave()
461 x60 = vec_perm(x54, x55, vcprm(1,0,3,2)); in fft16_vsx_interleave()
462 x61 = vec_perm(x54, x55, vcprm(s1,s0,s3,s2)); in fft16_vsx_interleave()
467 x66 = vec_perm(x62, x64, vcprm(0,1,s3,s2)); in fft16_vsx_interleave()
468 x67 = vec_perm(x63, x65, vcprm(s0,s1,3,2)); in fft16_vsx_interleave()
475 x72 = vec_perm(x25, x27, vcprm(s1,s0,s2,s3)); in fft16_vsx_interleave()
478 x75 = vec_perm(x73, x74, vcprm(0,1,s0,s1)); in fft16_vsx_interleave()
479 x76 = vec_perm(x44, x45, vcprm(3,2,s2,s3)); in fft16_vsx_interleave()
483 x79 = vec_perm(x46, x68, vcprm(0,1,s0,s1)); // z0.r z0.i z1.r z1.i in fft16_vsx_interleave()
484 x80 = vec_perm(x77, x68, vcprm(0,1,s2,s3)); // z2.r z2.i z3.r z3.i in fft16_vsx_interleave()
485 x81 = vec_perm(x46, x70, vcprm(2,3,s0,s1)); // z4.r z4.i z5.r z5.i in fft16_vsx_interleave()
486 x82 = vec_perm(x71, x77, vcprm(s2,s3,2,3)); // z6.r z6.i z7.r z7.i in fft16_vsx_interleave()
491 x83 = vec_perm(x47, x69, vcprm(0,1,s0,s1)); // z8.r z8.i z9.r z9.i in fft16_vsx_interleave()
492 x84 = vec_perm(x78, x69, vcprm(0,1,s2,s3)); // z10.r z10.i z11.r z11.i in fft16_vsx_interleave()
493 x85 = vec_perm(x47, x71, vcprm(2,3,s0,s1)); // z12.r z12.i z13.r z13.i in fft16_vsx_interleave()
494 x86 = vec_perm(x70, x78, vcprm(s2,s3,2,3)); // z14.r z14.i z15.r z15.i in fft16_vsx_interleave()
508 c = vec_perm(a, b, vcprm(0,1,s2,s1)); in fft4_vsx()
509 d = vec_perm(a, b, vcprm(2,3,s0,s3)); in fft4_vsx()
513 c = vec_perm(a,b, vcprm(0,s0,1,s1)); in fft4_vsx()
514 d = vec_perm(a, b, vcprm(2,s3,3,s2)); in fft4_vsx()
519 c = vec_perm(a, b, vcprm(0,1,s0,s1)); in fft4_vsx()
520 d = vec_perm(a, b, vcprm(2,3,s2,s3)); in fft4_vsx()
542 vz6 = vec_perm(vz2, vz3, vcprm(0,s0,1,s1)); in fft8_vsx()
543 vz7 = vec_perm(vz2, vz3, vcprm(2,s2,3,s3)); in fft8_vsx()
544 vz4 = vec_perm(vz0, vz1, vcprm(0,1,s2,s1)); in fft8_vsx()
545 vz5 = vec_perm(vz0, vz1, vcprm(2,3,s0,s3)); in fft8_vsx()
549 vz8 = vec_perm(vz3, vz3, vcprm(2,3,0,1)); in fft8_vsx()
557 vz4 = vec_perm(vz0, vz1, vcprm(0,s0,1,s1)); in fft8_vsx()
558 vz5 = vec_perm(vz0, vz1, vcprm(2,s3,3,s2)); in fft8_vsx()
559 vz6 = vec_perm(vz2, vz3, vcprm(1,2,s3,s0)); in fft8_vsx()
560 vz7 = vec_perm(vz2, vz3, vcprm(0,3,s2,s1)); in fft8_vsx()
567 vz4 = vec_perm(vz0, vz1, vcprm(0,1,s0,s1)); in fft8_vsx()
568 vz5 = vec_perm(vz0, vz1, vcprm(2,3,s2,s3)); in fft8_vsx()
569 vz6 = vec_perm(vz2, vz3, vcprm(0,2,s1,s3)); in fft8_vsx()
570 vz7 = vec_perm(vz2, vz3, vcprm(1,3,s0,s2)); in fft8_vsx()
606 vz4 = vec_perm(vz0, vz1, vcprm(0,1,s2,s1)); in fft16_vsx()
607 vz5 = vec_perm(vz0, vz1, vcprm(2,3,s0,s3)); in fft16_vsx()
608 vz6 = vec_perm(vz2, vz3, vcprm(0,1,s2,s1)); in fft16_vsx()
609 vz7 = vec_perm(vz2, vz3, vcprm(2,3,s0,s3)); in fft16_vsx()
616 vz4 = vec_perm(vz0, vz1, vcprm(0,s0,1,s1)); in fft16_vsx()
617 vz5 = vec_perm(vz0, vz1, vcprm(2,s3,3,s2)); in fft16_vsx()
618 vz6 = vec_perm(vz2, vz3, vcprm(0,s0,1,s1)); in fft16_vsx()
619 vz7 = vec_perm(vz2, vz3, vcprm(2,s3,3,s2)); in fft16_vsx()
626 vz4 = vec_perm(vz0, vz1, vcprm(0,1,s0,s1)); in fft16_vsx()
627 vz5 = vec_perm(vz0, vz1, vcprm(2,3,s2,s3)); in fft16_vsx()
629 vz6 = vec_perm(vz2, vz3, vcprm(0,1,s0,s1)); in fft16_vsx()
630 vz7 = vec_perm(vz2, vz3, vcprm(2,3,s2,s3)); in fft16_vsx()
636 vz10 = vec_perm(vz2, vz3, vcprm(0,s0,1,s1)); in fft16_vsx()
637 vz11 = vec_perm(vz2, vz3, vcprm(2,s2,3,s3)); in fft16_vsx()
638 vz8 = vec_perm(vz0, vz1, vcprm(0,1,s2,s1)); in fft16_vsx()
639 vz9 = vec_perm(vz0, vz1, vcprm(2,3,s0,s3)); in fft16_vsx()
643 vz12 = vec_perm(vz3, vz3, vcprm(2,3,0,1)); in fft16_vsx()
649 vz8 = vec_perm(vz0, vz1, vcprm(0,s0,1,s1)); in fft16_vsx()
650 vz9 = vec_perm(vz0, vz1, vcprm(2,s3,3,s2)); in fft16_vsx()
651 vz10 = vec_perm(vz2, vz3, vcprm(1,2,s3,s0)); in fft16_vsx()
652 vz11 = vec_perm(vz2, vz3, vcprm(0,3,s2,s1)); in fft16_vsx()
659 vz8 = vec_perm(vz0, vz1, vcprm(0,1,s0,s1)); in fft16_vsx()
660 vz9 = vec_perm(vz0, vz1, vcprm(2,3,s2,s3)); in fft16_vsx()
661 vz10 = vec_perm(vz2, vz3, vcprm(0,2,s1,s3)); in fft16_vsx()
662 vz11 = vec_perm(vz2, vz3, vcprm(1,3,s0,s2)); in fft16_vsx()
729 v9 = vec_perm(v9, v10, vcprm(s0,3,2,1)); in pass_vsx()
781 v9 = vec_perm(v9, v10, vcprm(s0,3,2,1)); in pass_vsx()
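
The *_interleave() matches above lean on one recurring pattern, visible in the source's own lane comments (// z0.r z0.i z1.r z1.i, etc.): an even-word gather such as vcprm(0,2,s0,s2), paired with an odd-word gather such as vcprm(1,3,s1,s3) or vcprm(3,1,s3,s1), splits packed complex floats into separate real and imaginary vectors, while vcprm(0,s0,1,s1)-style calls interleave words from the two operands and so go the other way. A minimal sketch of the split is below, written against the vcprm() sketch under the header; split_complex is a hypothetical helper name and the lane comments follow the same word-numbering convention the source comments use, neither is taken from fft_vsx.h.

    /* Minimal sketch of the deinterleave step behind the
     * vcprm(0,2,s0,s2) / vcprm(1,3,s1,s3) pairs above.  Uses the vcprm()
     * macro sketched under the header; split_complex is a hypothetical
     * helper, not a function from fft_vsx.h. */
    static inline void split_complex(vector float ab,  /* {r0, i0, r1, i1} */
                                     vector float cd,  /* {r2, i2, r3, i3} */
                                     vector float *re, /* {r0, r1, r2, r3} */
                                     vector float *im) /* {i0, i1, i2, i3} */
    {
        *re = vec_perm(ab, cd, vcprm(0,2,s0,s2)); /* even words: real parts */
        *im = vec_perm(ab, cd, vcprm(1,3,s1,s3)); /* odd words: imaginary parts */
    }

The odd-word partner in the file sometimes swaps the order within each half (vcprm(3,1,s3,s1)) to match the ordering the following butterfly expects, but the even/odd gathering idea is the same.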