Lines Matching refs:vbc
15 vector bool char vbc = { 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0 }; variable
115 res_vsc = vec_add(vbc, vsc); in test1()
119 res_vsc = vec_add(vsc, vbc); in test1()
127 res_vuc = vec_add(vbc, vuc); in test1()
131 res_vuc = vec_add(vuc, vbc); in test1()
191 res_vsc = vec_vaddubm(vbc, vsc); in test1()
195 res_vsc = vec_vaddubm(vsc, vbc); in test1()
203 res_vuc = vec_vaddubm(vbc, vuc); in test1()
207 res_vuc = vec_vaddubm(vuc, vbc); in test1()
277 res_vsc = vec_adds(vbc, vsc); in test1()
281 res_vsc = vec_adds(vsc, vbc); in test1()
289 res_vuc = vec_adds(vbc, vuc); in test1()
293 res_vuc = vec_adds(vuc, vbc); in test1()
349 res_vsc = vec_vaddsbs(vbc, vsc); in test1()
353 res_vsc = vec_vaddsbs(vsc, vbc); in test1()
361 res_vuc = vec_vaddubs(vbc, vuc); in test1()
365 res_vuc = vec_vaddubs(vuc, vbc); in test1()
422 res_vsc = vec_and(vbc, vsc); in test1()
426 res_vsc = vec_and(vsc, vbc); in test1()
434 res_vuc = vec_and(vbc, vuc); in test1()
438 res_vuc = vec_and(vuc, vbc); in test1()
442 res_vbc = vec_and(vbc, vbc); in test1()
506 res_vsc = vec_vand(vbc, vsc); in test1()
510 res_vsc = vec_vand(vsc, vbc); in test1()
518 res_vuc = vec_vand(vbc, vuc); in test1()
522 res_vuc = vec_vand(vuc, vbc); in test1()
526 res_vbc = vec_vand(vbc, vbc); in test1()
593 res_vsc = vec_andc(vbc, vsc); in test1()
599 res_vsc = vec_andc(vsc, vbc); in test1()
611 res_vuc = vec_andc(vbc, vuc); in test1()
617 res_vuc = vec_andc(vuc, vbc); in test1()
621 res_vbc = vec_andc(vbc, vbc); in test1()
729 res_vsc = vec_vandc(vbc, vsc); in test1()
735 res_vsc = vec_vandc(vsc, vbc); in test1()
747 res_vuc = vec_vandc(vbc, vuc); in test1()
753 res_vuc = vec_vandc(vuc, vbc); in test1()
759 res_vbc = vec_vandc(vbc, vbc); in test1()
1241 res_vbc = vec_ld(0, &vbc); in test6()
1313 res_vbc = vec_lvx(0, &vbc); in test6()
1443 res_vbc = vec_ldl(0, &vbc); in test6()
1511 res_vbc = vec_lvxl(0, &vbc); in test6()
1613 res_vsc = vec_max(vbc, vsc); in test6()
1617 res_vsc = vec_max(vsc, vbc); in test6()
1625 res_vuc = vec_max(vbc, vuc); in test6()
1629 res_vuc = vec_max(vuc, vbc); in test6()
1689 res_vsc = vec_vmaxsb(vbc, vsc); in test6()
1693 res_vsc = vec_vmaxsb(vsc, vbc); in test6()
1701 res_vuc = vec_vmaxub(vbc, vuc); in test6()
1705 res_vuc = vec_vmaxub(vuc, vbc); in test6()
1770 res_vbc = vec_mergeh(vbc, vbc); in test6()
1814 res_vbc = vec_vmrghb(vbc, vbc); in test6()
1859 res_vbc = vec_mergel(vbc, vbc); in test6()
1903 res_vbc = vec_vmrglb(vbc, vbc); in test6()
1949 res_vsc = vec_min(vbc, vsc); in test6()
1953 res_vsc = vec_min(vsc, vbc); in test6()
1961 res_vuc = vec_min(vbc, vuc); in test6()
1965 res_vuc = vec_min(vuc, vbc); in test6()
2025 res_vsc = vec_vminsb(vbc, vsc); in test6()
2029 res_vsc = vec_vminsb(vsc, vbc); in test6()
2037 res_vuc = vec_vminub(vbc, vuc); in test6()
2041 res_vuc = vec_vminub(vuc, vbc); in test6()
2190 vec_mtvscr(vbc); in test6()
2335 res_vuc = vec_nor(vbc, vbc); in test6()
2395 res_vuc = vec_vnor(vbc, vbc); in test6()
2448 res_vsc = vec_or(vbc, vsc); in test6()
2452 res_vsc = vec_or(vsc, vbc); in test6()
2460 res_vuc = vec_or(vbc, vuc); in test6()
2464 res_vuc = vec_or(vuc, vbc); in test6()
2468 res_vbc = vec_or(vbc, vbc); in test6()
2544 res_vsc = vec_vor(vbc, vsc); in test6()
2548 res_vsc = vec_vor(vsc, vbc); in test6()
2556 res_vuc = vec_vor(vbc, vuc); in test6()
2560 res_vuc = vec_vor(vuc, vbc); in test6()
2564 res_vbc = vec_vor(vbc, vbc); in test6()
2769 res_vbc = vec_perm(vbc, vbc, vuc); in test6()
2813 res_vbc = vec_vperm(vbc, vbc, vuc); in test6()
2936 res_vsc = vec_sel(vsc, vsc, vbc); in test6()
2956 res_vuc = vec_sel(vuc, vuc, vbc); in test6()
2966 res_vbc = vec_sel(vbc, vbc, vuc); in test6()
2976 res_vbc = vec_sel(vbc, vbc, vbc); in test6()
3136 res_vsc = vec_vsel(vsc, vsc, vbc); in test6()
3156 res_vuc = vec_vsel(vuc, vuc, vbc); in test6()
3166 res_vbc = vec_vsel(vbc, vbc, vuc); in test6()
3176 res_vbc = vec_vsel(vbc, vbc, vbc); in test6()
3623 res_vbc = vec_sll(vbc, vuc); in test6()
3627 res_vbc = vec_sll(vbc, vus); in test6()
3631 res_vbc = vec_sll(vbc, vui); in test6()
3743 res_vbc = vec_vsl(vbc, vuc); in test6()
3747 res_vbc = vec_vsl(vbc, vus); in test6()
3751 res_vbc = vec_vsl(vbc, vui); in test6()
3977 res_vbc = vec_splat(vbc, 0); in test6()
4021 res_vbc = vec_vspltb(vbc, 0); in test6()
4201 res_vbc = vec_srl(vbc, vuc); in test6()
4205 res_vbc = vec_srl(vbc, vus); in test6()
4209 res_vbc = vec_srl(vbc, vui); in test6()
4321 res_vbc = vec_vsr(vbc, vuc); in test6()
4325 res_vbc = vec_vsr(vbc, vus); in test6()
4329 res_vbc = vec_vsr(vbc, vui); in test6()
4563 vec_st(vbc, 0, &param_uc); in test6()
4567 vec_st(vbc, 0, &param_uc); in test6()
4571 vec_st(vbc, 0, &vbc); in test6()
4667 vec_stvx(vbc, 0, &param_uc); in test6()
4671 vec_stvx(vbc, 0, &param_uc); in test6()
4675 vec_stvx(vbc, 0, &vbc); in test6()
4764 vec_ste(vbc, 0, &param_sc); in test6()
4768 vec_ste(vbc, 0, &param_uc); in test6()
4824 vec_stvebx(vbc, 0, &param_sc); in test6()
4828 vec_stvebx(vbc, 0, &param_uc); in test6()
4893 vec_stl(vbc, 0, &param_sc); in test6()
4897 vec_stl(vbc, 0, &param_uc); in test6()
4901 vec_stl(vbc, 0, &vbc); in test6()
4997 vec_stvxl(vbc, 0, &param_sc); in test6()
5001 vec_stvxl(vbc, 0, &param_uc); in test6()
5005 vec_stvxl(vbc, 0, &vbc); in test6()
5090 res_vsc = vec_sub(vbc, vsc); in test6()
5094 res_vsc = vec_sub(vsc, vbc); in test6()
5102 res_vuc = vec_sub(vbc, vuc); in test6()
5106 res_vuc = vec_sub(vuc, vbc); in test6()
5166 res_vsc = vec_vsububm(vbc, vsc); in test6()
5170 res_vsc = vec_vsububm(vsc, vbc); in test6()
5178 res_vuc = vec_vsububm(vbc, vuc); in test6()
5182 res_vuc = vec_vsububm(vuc, vbc); in test6()
5252 res_vsc = vec_subs(vbc, vsc); in test6()
5256 res_vsc = vec_subs(vsc, vbc); in test6()
5264 res_vuc = vec_subs(vbc, vuc); in test6()
5268 res_vuc = vec_subs(vuc, vbc); in test6()
5324 res_vsc = vec_vsubsbs(vbc, vsc); in test6()
5328 res_vsc = vec_vsubsbs(vsc, vbc); in test6()
5336 res_vuc = vec_vsububs(vbc, vuc); in test6()
5340 res_vuc = vec_vsububs(vuc, vbc); in test6()
5455 res_vbs = vec_unpackh(vbc); in test6()
5475 res_vbs = vec_vupkhsb(vbc); in test6()
5496 res_vbs = vec_unpackl(vbc); in test6()
5516 res_vbs = vec_vupklsb(vbc); in test6()
5537 res_vsc = vec_xor(vbc, vsc); in test6()
5541 res_vsc = vec_xor(vsc, vbc); in test6()
5549 res_vuc = vec_xor(vbc, vuc); in test6()
5553 res_vuc = vec_xor(vuc, vbc); in test6()
5557 res_vbc = vec_xor(vbc, vbc); in test6()
5633 res_vsc = vec_vxor(vbc, vsc); in test6()
5637 res_vsc = vec_vxor(vsc, vbc); in test6()
5645 res_vuc = vec_vxor(vbc, vuc); in test6()
5649 res_vuc = vec_vxor(vuc, vbc); in test6()
5653 res_vbc = vec_vxor(vbc, vbc); in test6()
5736 res_uc = vec_extract(vbc, param_i); in test6()
5777 res_vbc = vec_insert(param_uc, vbc, param_i); in test6()
5850 res_vbc = vec_lvlx(0, &vbc); in test6()
6021 res_vbc = vec_lvlxl(0, &vbc); in test6()
6192 res_vbc = vec_lvrx(0, &vbc); in test6()
6363 res_vbc = vec_lvrxl(0, &vbc); in test6()
6558 vec_stvlx(vbc, 0, &vbc); in test6()
6831 vec_stvlxl(vbc, 0, &vbc); in test6()
7104 vec_stvrx(vbc, 0, &vbc); in test6()
7377 vec_stvrxl(vbc, 0, &vbc); in test6()
7664 res_i = vec_all_eq(vsc, vbc); in test6()
7672 res_i = vec_all_eq(vuc, vbc); in test6()
7676 res_i = vec_all_eq(vbc, vsc); in test6()
7680 res_i = vec_all_eq(vbc, vuc); in test6()
7684 res_i = vec_all_eq(vbc, vbc); in test6()
7757 res_i = vec_all_ge(vsc, vbc); in test6()
7765 res_i = vec_all_ge(vuc, vbc); in test6()
7769 res_i = vec_all_ge(vbc, vsc); in test6()
7773 res_i = vec_all_ge(vbc, vuc); in test6()
7777 res_i = vec_all_ge(vbc, vbc); in test6()
7846 res_i = vec_all_gt(vsc, vbc); in test6()
7854 res_i = vec_all_gt(vuc, vbc); in test6()
7858 res_i = vec_all_gt(vbc, vsc); in test6()
7862 res_i = vec_all_gt(vbc, vuc); in test6()
7866 res_i = vec_all_gt(vbc, vbc); in test6()
7940 res_i = vec_all_le(vsc, vbc); in test6()
7948 res_i = vec_all_le(vuc, vbc); in test6()
7952 res_i = vec_all_le(vbc, vsc); in test6()
7956 res_i = vec_all_le(vbc, vuc); in test6()
7960 res_i = vec_all_le(vbc, vbc); in test6()
8029 res_i = vec_all_lt(vsc, vbc); in test6()
8037 res_i = vec_all_lt(vuc, vbc); in test6()
8041 res_i = vec_all_lt(vbc, vsc); in test6()
8045 res_i = vec_all_lt(vbc, vuc); in test6()
8049 res_i = vec_all_lt(vbc, vbc); in test6()
8123 res_i = vec_all_ne(vsc, vbc); in test6()
8131 res_i = vec_all_ne(vuc, vbc); in test6()
8135 res_i = vec_all_ne(vbc, vsc); in test6()
8139 res_i = vec_all_ne(vbc, vuc); in test6()
8143 res_i = vec_all_ne(vbc, vbc); in test6()
8241 res_i = vec_any_eq(vsc, vbc); in test6()
8249 res_i = vec_any_eq(vuc, vbc); in test6()
8253 res_i = vec_any_eq(vbc, vsc); in test6()
8257 res_i = vec_any_eq(vbc, vuc); in test6()
8261 res_i = vec_any_eq(vbc, vbc); in test6()
8334 res_i = vec_any_ge(vsc, vbc); in test6()
8342 res_i = vec_any_ge(vuc, vbc); in test6()
8346 res_i = vec_any_ge(vbc, vsc); in test6()
8350 res_i = vec_any_ge(vbc, vuc); in test6()
8354 res_i = vec_any_ge(vbc, vbc); in test6()
8423 res_i = vec_any_gt(vsc, vbc); in test6()
8431 res_i = vec_any_gt(vuc, vbc); in test6()
8435 res_i = vec_any_gt(vbc, vsc); in test6()
8439 res_i = vec_any_gt(vbc, vuc); in test6()
8443 res_i = vec_any_gt(vbc, vbc); in test6()
8512 res_i = vec_any_le(vsc, vbc); in test6()
8520 res_i = vec_any_le(vuc, vbc); in test6()
8524 res_i = vec_any_le(vbc, vsc); in test6()
8528 res_i = vec_any_le(vbc, vuc); in test6()
8532 res_i = vec_any_le(vbc, vbc); in test6()
8601 res_i = vec_any_lt(vsc, vbc); in test6()
8609 res_i = vec_any_lt(vuc, vbc); in test6()
8613 res_i = vec_any_lt(vbc, vsc); in test6()
8617 res_i = vec_any_lt(vbc, vuc); in test6()
8621 res_i = vec_any_lt(vbc, vbc); in test6()
8695 res_i = vec_any_ne(vsc, vbc); in test6()
8703 res_i = vec_any_ne(vuc, vbc); in test6()
8707 res_i = vec_any_ne(vbc, vsc); in test6()
8711 res_i = vec_any_ne(vbc, vuc); in test6()
8715 res_i = vec_any_ne(vbc, vbc); in test6()