Lines Matching refs:t0

115 my ($t0,$t1,$t2,$t3,$t4)=("%rax","%rdx","%rcx","%r12","%r13");
139 mov $a0, $t0
153 cmovc $t0, $a0
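
The mov/cmovc pairs above (139/153, and again at 263/277, 285/299, ...) are the listing's constant-time reduction idiom: keep a copy of the accumulator, subtract the modulus unconditionally, then pick the right result with a carry-driven conditional move instead of a branch. A C sketch of the idea, assuming the same little-endian 4x64-bit limbs the asm uses; cond_sub_p and its carry-in flag are illustrative names, not from this file:

    #include <stdint.h>

    /* Save, subtract p, then "cmovc" the saved copy back if the
     * subtraction borrowed (and the preceding add did not already
     * carry out of 2^256). carry is the 0/1 carry-out of that add. */
    static void cond_sub_p(uint64_t a[4], const uint64_t p[4], uint64_t carry)
    {
        uint64_t saved[4], borrow = 0, keep;
        unsigned __int128 d;

        for (int i = 0; i < 4; i++) {
            saved[i] = a[i];                    /* mov $a0, $t0 ...   */
            d = (unsigned __int128)a[i] - p[i] - borrow;
            a[i] = (uint64_t)d;
            borrow = (uint64_t)(d >> 64) & 1;   /* 1 iff we borrowed  */
        }
        keep = (uint64_t)0 - (borrow & (carry ^ 1));
        for (int i = 0; i < 4; i++)             /* cmovc $t0, $a0 ... */
            a[i] = (saved[i] & keep) | (a[i] & ~keep);
    }

A branch here would leak, through timing, whether the value was above or below p, which is why this shape recurs throughout the listing.
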
189 mov $a0, $t0
203 test \$1, $t0
205 cmovz $t0, $a0
211 mov $a1, $t0 # a0:a3>>1
213 shl \$63, $t0
216 or $t0, $a0
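
Lines 189-216 (and again 3345-3368) are division by two modulo p: test \$1/cmovz makes the value even by conditionally adding p (same residue class), and the shl \$63/or pairs then shift the whole 256-bit quantity right one bit, carrying the low bit of each higher limb across, per the "# a0:a3>>1" comment. A C sketch under the same limb convention; div_by_2 is an illustrative name:

    #include <stdint.h>

    static void div_by_2(uint64_t a[4], const uint64_t p[4])
    {
        uint64_t odd   = a[0] & 1;           /* test $1, $t0           */
        uint64_t mask  = (uint64_t)0 - odd;  /* cmovz selects a or a+p */
        uint64_t carry = 0;
        unsigned __int128 s;

        for (int i = 0; i < 4; i++) {        /* a += odd ? p : 0       */
            s = (unsigned __int128)a[i] + (p[i] & mask) + carry;
            a[i] = (uint64_t)s;
            carry = (uint64_t)(s >> 64);
        }
        for (int i = 0; i < 3; i++)          /* shl $63 / or pairs     */
            a[i] = (a[i] >> 1) | (a[i + 1] << 63);
        a[3] = (a[3] >> 1) | (carry << 63);  /* bit 256 from the add   */
    }
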
263 mov $a0, $t0
277 cmovc $t0, $a0
285 mov $a0, $t0
299 cmovc $t0, $a0
341 mov $a0, $t0
355 cmovc $t0, $a0
397 mov $a0, $t0
411 cmovz $t0, $a0
453 mov $a0, $t0
467 cmovz $t0, $a0
491 my ($t0,$t1,$t2,$t3,$t4)=("%rcx","%rbp","%rbx","%rdx","%rax");
534 mov %rax, $t0
537 mov $t0, %rax
542 mov $t0, %rax
548 mov $t0, %rax
567 mov %rdx, $t0
573 add $t0, $acc1
592 mov %rax, $t0
595 mov $t0, %rax
603 mov $t0, %rax
611 mov $t0, %rax
614 mov $acc1, $t0
630 add %rax, $t0 # guaranteed to be zero
632 adc %rdx, $t0
638 add $t0, $acc2
657 mov %rax, $t0
660 mov $t0, %rax
668 mov $t0, %rax
676 mov $t0, %rax
679 mov $acc2, $t0
695 add %rax, $t0 # guaranteed to be zero
697 adc %rdx, $t0
703 add $t0, $acc3
722 mov %rax, $t0
725 mov $t0, %rax
733 mov $t0, %rax
741 mov $t0, %rax
744 mov $acc3, $t0
760 add %rax, $t0 # guaranteed to be zero
762 adc %rdx, $t0
768 add $t0, $acc4
790 mov $acc0, $t0
798 cmovc $t0, $acc0
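
The adds marked "# guaranteed to be zero" (630, 695, 760, ...) are the tell-tale of Montgomery reduction: each pass picks a word m so that acc[0] + m*p[0] vanishes modulo 2^64, so the bottom limb is annihilated and only a carry survives; four passes divide the product by 2^256 modulo p. A generic word-step sketch in C; the real code specializes the multiply to the P-256 prime (see further down) rather than looping over p, and mont_step is an illustrative name:

    #include <stdint.h>

    /* One Montgomery word-step: kill acc[0], then shift limbs down.
     * n0 = -p^-1 mod 2^64. */
    static void mont_step(uint64_t acc[5], const uint64_t p[4], uint64_t n0)
    {
        uint64_t m = acc[0] * n0;
        uint64_t carry = 0;
        unsigned __int128 t;

        for (int i = 0; i < 4; i++) {
            t = (unsigned __int128)m * p[i] + acc[i] + carry;
            acc[i] = (uint64_t)t;      /* i == 0: zero by choice of m */
            carry = (uint64_t)(t >> 64);
        }
        acc[4] += carry;               /* top-word overflow not modeled */
        for (int i = 0; i < 4; i++)    /* drop the dead limb = /2^64    */
            acc[i] = acc[i + 1];
        acc[4] = 0;
    }
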
942 mov $acc0, $t0
954 add %rax, $t0 # guaranteed to be zero
956 adc %rdx, $t0
962 add $t0, $acc1
970 mov $acc1, $t0
985 add %rax, $t0 # guaranteed to be zero
987 adc %rdx, $t0
993 add $t0, $acc2
1001 mov $acc2, $t0
1016 add %rax, $t0 # guaranteed to be zero
1018 adc %rdx, $t0
1024 add $t0, $acc3
1032 mov $acc3, $t0
1047 add %rax, $t0 # guaranteed to be zero
1049 adc %rdx, $t0
1055 add $t0, $acc0
1159 mulx $acc2, $t0, $acc2
1161 add $t0, $acc1
1162 mulx $acc4, $t0, $acc4
1166 adc $t0, $acc3
1171 mulx 8*0+128(%r14), $t0, $t1
1172 adcx $t0, $acc0 # guaranteed to be zero
1175 mulx 8*1+128(%r14), $t0, $t1
1176 adcx $t0, $acc1
1179 mulx 8*2+128(%r14), $t0, $t1
1180 adcx $t0, $acc2
1183 mulx 8*3+128(%r14), $t0, $t1
1185 adcx $t0, $acc3
1192 mulx 8*0+128($a_ptr), $t0, $t1
1193 adcx $t0, $acc1
1196 mulx 8*1+128($a_ptr), $t0, $t1
1197 adcx $t0, $acc2
1200 mulx 8*2+128($a_ptr), $t0, $t1
1201 adcx $t0, $acc3
1204 mulx 8*3+128($a_ptr), $t0, $t1
1207 adcx $t0, $acc4
1215 mulx 8*0+128(%r14), $t0, $t1
1216 adcx $t0, $acc1 # guaranteed to be zero
1219 mulx 8*1+128(%r14), $t0, $t1
1220 adcx $t0, $acc2
1223 mulx 8*2+128(%r14), $t0, $t1
1224 adcx $t0, $acc3
1227 mulx 8*3+128(%r14), $t0, $t1
1229 adcx $t0, $acc4
1236 mulx 8*0+128($a_ptr), $t0, $t1
1237 adcx $t0, $acc2
1240 mulx 8*1+128($a_ptr), $t0, $t1
1241 adcx $t0, $acc3
1244 mulx 8*2+128($a_ptr), $t0, $t1
1245 adcx $t0, $acc4
1248 mulx 8*3+128($a_ptr), $t0, $t1
1251 adcx $t0, $acc5
1259 mulx 8*0+128(%r14), $t0, $t1
1260 adcx $t0, $acc2 # guaranteed to be zero
1263 mulx 8*1+128(%r14), $t0, $t1
1264 adcx $t0, $acc3
1267 mulx 8*2+128(%r14), $t0, $t1
1268 adcx $t0, $acc4
1271 mulx 8*3+128(%r14), $t0, $t1
1273 adcx $t0, $acc5
1280 mulx 8*0+128($a_ptr), $t0, $t1
1281 adcx $t0, $acc3
1284 mulx 8*1+128($a_ptr), $t0, $t1
1285 adcx $t0, $acc4
1288 mulx 8*2+128($a_ptr), $t0, $t1
1289 adcx $t0, $acc5
1292 mulx 8*3+128($a_ptr), $t0, $t1
1295 adcx $t0, $acc0
1303 mulx 8*0+128(%r14), $t0, $t1
1304 adcx $t0, $acc3 # guaranteed to be zero
1307 mulx 8*1+128(%r14), $t0, $t1
1308 adcx $t0, $acc4
1311 mulx 8*2+128(%r14), $t0, $t1
1312 adcx $t0, $acc5
1315 mulx 8*3+128(%r14), $t0, $t1
1318 adcx $t0, $acc0
1327 mov $acc0, $t0
1337 cmovc $t0, $acc0
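
From line 1159 on, the matches switch to the BMI2/ADX path: mulx produces a full 64x64->128 product without touching any flags, while adcx and adox propagate carries through CF and OF respectively, so two independent accumulation chains (the $t0 low halves and the $t1 high halves) run interleaved. A C sketch of one such row using the ADX/BMI2 intrinsics (compile with -mbmi2 -madx; mul_acc_row is an illustrative name):

    #include <immintrin.h>

    /* acc[0..5] += a[0..3] * b, low products on the CF chain and
     * high products on the OF chain, as in the asm above. */
    static void mul_acc_row(unsigned long long acc[6],
                            const unsigned long long a[4],
                            unsigned long long b)
    {
        unsigned long long lo, hi;
        unsigned char cf = 0, of = 0;

        for (int i = 0; i < 4; i++) {
            lo = _mulx_u64(a[i], b, &hi);                     /* no flags */
            cf = _addcarryx_u64(cf, acc[i], lo, &acc[i]);     /* CF chain */
            of = _addcarryx_u64(of, acc[i + 1], hi, &acc[i + 1]); /* OF   */
        }
        cf = _addcarryx_u64(cf, acc[4], 0, &acc[4]);    /* drain CF chain */
        (void)_addcarryx_u64(of, acc[5], cf, &acc[5]);  /* drain OF + CF  */
    }

The win over a plain adc chain is that the two streams need no serializing flag save/restore between them, which is exactly what the long adcx/adox runs above exploit.
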
1394 mulx $acc7, $t0, $acc3 # a[0]*a[2]
1399 add $t0, $acc2
1405 mulx $acc7, $t0, $t1 # a[1]*a[2]
1406 adcx $t0, $acc3
1409 mulx $acc0, $t0, $t1 # a[1]*a[3]
1411 adcx $t0, $acc4
1415 mulx $acc0, $t0, $acc6 # a[2]*a[3]
1420 adox $t0, $acc5
1430 mulx %rdx, $t0, $t4
1433 adox $t0, $acc2
1435 mulx %rdx, $t0, $t1
1440 adox $t0, $acc4
1442 mulx %rdx, $t0, $t4
1443 adox $t0, $acc6
1448 mulx 8*4($a_ptr), %rdx, $t0
1451 mulx 8*0($a_ptr), $t0, $t1
1452 adcx $t0, $acc0 # guaranteed to be zero
1454 mulx 8*1($a_ptr), $t0, $t1
1455 adcx $t0, $acc1
1457 mulx 8*2($a_ptr), $t0, $t1
1458 adcx $t0, $acc2
1460 mulx 8*3($a_ptr), $t0, $t1
1461 adcx $t0, $acc3
1467 mulx 8*4($a_ptr), %rdx, $t0
1469 mulx 8*0($a_ptr), $t0, $t1
1470 adox $t0, $acc1 # guaranteed to be zero
1472 mulx 8*1($a_ptr), $t0, $t1
1473 adox $t0, $acc2
1475 mulx 8*2($a_ptr), $t0, $t1
1476 adox $t0, $acc3
1478 mulx 8*3($a_ptr), $t0, $t1
1479 adox $t0, $acc0
1485 mulx 8*4($a_ptr), %rdx, $t0
1487 mulx 8*0($a_ptr), $t0, $t1
1488 adcx $t0, $acc2 # guaranteed to be zero
1490 mulx 8*1($a_ptr), $t0, $t1
1491 adcx $t0, $acc3
1493 mulx 8*2($a_ptr), $t0, $t1
1494 adcx $t0, $acc0
1496 mulx 8*3($a_ptr), $t0, $t1
1497 adcx $t0, $acc1
1503 mulx 8*4($a_ptr), %rdx, $t0
1505 mulx 8*0($a_ptr), $t0, $t1
1506 adox $t0, $acc3 # guaranteed to be zero
1508 mulx 8*1($a_ptr), $t0, $t1
1509 adox $t0, $acc0
1511 mulx 8*2($a_ptr), $t0, $t1
1512 adox $t0, $acc1
1514 mulx 8*3($a_ptr), $t0, $t1
1515 adox $t0, $acc2
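
The squaring path (1394-1515) computes only the off-diagonal products a[i]*a[j] with i < j (note the "# a[0]*a[2]" style comments), doubles the whole row, then adds the diagonal squares via mulx %rdx with %rdx holding a[i], i.e. a[i]*a[i]. A plain-C sketch of that structure, without the asm's scheduling; sqr_256 is an illustrative name:

    #include <stdint.h>

    static void sqr_256(uint64_t r[8], const uint64_t a[4])
    {
        unsigned __int128 t, s;
        uint64_t carry;
        int i, j;

        for (i = 0; i < 8; i++) r[i] = 0;
        for (i = 0; i < 4; i++) {            /* off-diagonal a[i]*a[j] */
            carry = 0;
            for (j = i + 1; j < 4; j++) {
                t = (unsigned __int128)a[i] * a[j] + r[i + j] + carry;
                r[i + j] = (uint64_t)t;
                carry = (uint64_t)(t >> 64);
            }
            r[i + 4] = carry;
        }
        carry = 0;                           /* double the cross terms */
        for (i = 1; i < 8; i++) {
            uint64_t top = r[i] >> 63;
            r[i] = (r[i] << 1) | carry;
            carry = top;
        }
        carry = 0;                           /* add a[i]^2 (mulx %rdx) */
        for (i = 0; i < 4; i++) {
            t = (unsigned __int128)a[i] * a[i];
            s = (unsigned __int128)r[2*i] + (uint64_t)t + carry;
            r[2*i] = (uint64_t)s;
            s = (unsigned __int128)r[2*i + 1] + (uint64_t)(t >> 64)
                + (uint64_t)(s >> 64);
            r[2*i + 1] = (uint64_t)s;
            carry = (uint64_t)(s >> 64);
        }
    }

Computing each cross product once and doubling roughly halves the multiply count versus a general 4x4 product, which is the point of a dedicated sqr path.
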
1737 mov %rdx, $t0
1740 add $t0, $acc2
1745 mov %rdx, $t0
1748 add $t0, $acc3
1753 mov %rdx, $t0
1756 add $t0, $acc4
1784 mov %rdx, $t0
1787 add $t0, $acc3
1792 mov %rdx, $t0
1795 add $t0, $acc4
1800 mov %rdx, $t0
1803 add $t0, $acc5
1831 mov %rdx, $t0
1834 add $t0, $acc4
1839 mov %rdx, $t0
1842 add $t0, $acc5
1847 mov %rdx, $t0
1850 add $t0, $acc0
1865 mov $acc4, $t0
1881 cmovc $t0, $acc4
2028 mov %rdx, $t0
2031 add $t0, $acc1
2035 mov %rdx, $t0
2038 add $t0, $acc3
2042 mov %rdx, $t0
2045 add $t0, $acc5
2056 mov $acc0, $t0
2059 shr \$32, $t0
2061 adc $t0, $acc2
2068 mov $acc1, $t0
2072 shr \$32, $t0
2074 adc $t0, $acc3
2081 mov $acc2, $t0
2085 shr \$32, $t0
2087 adc $t0, $acc0
2094 mov $acc3, $t0
2098 shr \$32, $t0
2100 adc $t0, $acc1
2119 mov $acc7, $t0
2128 cmovc $t0, $acc7
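
The mov/shr \$32/adc triples from 2056 on (and the shlx/shrx-by-$poly1 lines nearby, which appear to be the same trick with the shift count kept in a register) are the P-256-specific form of the reduction word. With p = 2^256 - 2^224 + 2^192 + 2^96 - 1, the limbs are [2^64-1, 2^32-1, 0, 2^64-2^32+1] and -p^-1 mod 2^64 = 1, so the Montgomery word is simply m = acc[0] and m*p collapses into one 32-bit shift pair plus a single multiply by the top limb. A C sketch of one such word, without the asm's scheduling; p256_reduce_word is an illustrative name:

    #include <stdint.h>

    /* acc[0] + m*(2^64-1) = m*2^64, so limb 0 dies and m carries up;
     * that m plus m*(2^32-1) from p[1] is exactly m*2^32, hence the
     * shl/shr $32 pair. p[2] = 0, and p[3] = 0xffffffff00000001. */
    static uint64_t p256_reduce_word(uint64_t acc[5])
    {
        uint64_t m = acc[0], c;
        unsigned __int128 t;

        t = (unsigned __int128)acc[1] + (m << 32);        /* shl $32 */
        acc[1] = (uint64_t)t;  c = (uint64_t)(t >> 64);
        t = (unsigned __int128)acc[2] + (m >> 32) + c;    /* shr $32 */
        acc[2] = (uint64_t)t;  c = (uint64_t)(t >> 64);
        t = (unsigned __int128)m * 0xffffffff00000001ull  /* m * p[3] */
            + acc[3] + c;
        acc[3] = (uint64_t)t;  c = (uint64_t)(t >> 64);
        t = (unsigned __int128)acc[4] + c;
        acc[4] = (uint64_t)t;
        acc[0] = 0;                  /* dead limb; caller rotates down */
        return (uint64_t)(t >> 64);
    }
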
2146 mulx $acc2, $t0, $acc2
2151 adc $t0, $acc1
2152 mulx $acc4, $t0, $acc4
2156 adc $t0, $acc3
2157 shrx $poly1,$acc0,$t0
2163 adc $t0, $acc2
2165 mulx $poly3, $t0, $t1
2167 adc $t0, $acc3
2174 mulx 8*0+128($a_ptr), $t0, $t1
2175 adcx $t0, $acc1
2178 mulx 8*1+128($a_ptr), $t0, $t1
2179 adcx $t0, $acc2
2182 mulx 8*2+128($a_ptr), $t0, $t1
2183 adcx $t0, $acc3
2186 mulx 8*3+128($a_ptr), $t0, $t1
2188 adcx $t0, $acc4
2189 shlx $poly1, $acc1, $t0
2199 add $t0, $acc2
2202 mulx $poly3, $t0, $t1
2204 adc $t0, $acc4
2211 mulx 8*0+128($a_ptr), $t0, $t1
2212 adcx $t0, $acc2
2215 mulx 8*1+128($a_ptr), $t0, $t1
2216 adcx $t0, $acc3
2219 mulx 8*2+128($a_ptr), $t0, $t1
2220 adcx $t0, $acc4
2223 mulx 8*3+128($a_ptr), $t0, $t1
2225 adcx $t0, $acc5
2226 shlx $poly1, $acc2, $t0
2236 add $t0, $acc3
2239 mulx $poly3, $t0, $t1
2241 adc $t0, $acc5
2248 mulx 8*0+128($a_ptr), $t0, $t1
2249 adcx $t0, $acc3
2252 mulx 8*1+128($a_ptr), $t0, $t1
2253 adcx $t0, $acc4
2256 mulx 8*2+128($a_ptr), $t0, $t1
2257 adcx $t0, $acc5
2260 mulx 8*3+128($a_ptr), $t0, $t1
2262 adcx $t0, $acc0
2263 shlx $poly1, $acc3, $t0
2273 add $t0, $acc4
2276 mulx $poly3, $t0, $t1
2279 adc $t0, $acc0
2287 mov $acc0, $t0
2298 cmovc $t0, $acc0
2313 mulx $acc7, $t0, $acc3 # a[0]*a[2]
2315 adc $t0, $acc2
2323 mulx $acc7, $t0, $t1 # a[1]*a[2]
2324 adcx $t0, $acc3
2327 mulx $acc0, $t0, $t1 # a[1]*a[3]
2329 adcx $t0, $acc4
2334 mulx $acc0, $t0, $acc6 # a[2]*a[3]
2338 adox $t0, $acc5
2347 mulx %rdx, $t0, $t4
2350 adox $t0, $acc2
2353 mulx %rdx, $t0, $t1
2357 adox $t0, $acc4
2361 mulx %rdx, $t0, $t4
2363 adox $t0, $acc6
2364 shlx $a_ptr, $acc0, $t0
2370 add $t0, $acc1
2373 mulx $acc0, $t0, $acc0
2374 adc $t0, $acc3
2375 shlx $a_ptr, $acc1, $t0
2380 add $t0, $acc2
2383 mulx $acc1, $t0, $acc1
2384 adc $t0, $acc0
2385 shlx $a_ptr, $acc2, $t0
2390 add $t0, $acc3
2393 mulx $acc2, $t0, $acc2
2394 adc $t0, $acc1
2395 shlx $a_ptr, $acc3, $t0
2400 add $t0, $acc0
2403 mulx $acc3, $t0, $acc3
2404 adc $t0, $acc2
2443 my ($t0,$t1,$t2)=("%rcx","%r12","%r13");
2473 mov %rax, $t0
2476 shr \$32, $t0
2478 adc $t0, $acc2
2485 mov $acc1, $t0
2489 shr \$32, $t0
2491 adc $t0, $acc3
2498 mov $acc2, $t0
2502 shr \$32, $t0
2504 adc $t0, $acc0
2511 mov $acc3, $t0
2515 shr \$32, $t0
2517 adc $t0, $acc1
2518 mov $acc0, $t0
2533 cmovnz $t0, $acc0
3054 my ($t0,$t1,$t2,$t3,$t4)=("%rax","%rbp","%rcx",$acc4,$acc4);
3095 mov $a0, $t0
3109 cmovc $t0, $a0
3128 mov $a0, $t0
3142 cmovz $t0, $a0
3159 sub $a0, $t0
3161 mov $t0, $a0
3167 add \$-1, $t0
3175 cmovnz $t0, $a0
3191 mov $a0, $t0
3205 cmovc $t0, $a0
3345 mov $a0, $t0
3355 test \$1, $t0
3357 cmovz $t0, $a0
3363 mov $a1, $t0 # a0:a3>>1
3365 shl \$63, $t0
3368 or $t0, $a0
3414 mov $S+8*0(%rsp), $t0
3691 mov $acc0, $t0
3705 cmovc $t0, $acc0
3706 mov 8*0($a_ptr), $t0
3720 mov $U2+8*0(%rsp), $t0
4016 mov $acc0, $t0
4030 cmovc $t0, $acc0
4031 mov 8*0($a_ptr), $t0
4045 mov $U2+8*0(%rsp), $t0
4187 mov $a0, $t0
4202 cmovc $t0, $a0
4222 mov $a0, $t0
4237 cmovnc $t0, $a0
4255 sbb $a0, $t0
4257 mov $t0, $a0
4264 adc \$-1, $t0
4272 cmovc $t0, $a0
4288 mov $a0, $t0
4303 cmovc $t0, $a0