Lines Matching refs:T1
605 .macro CALC_AAD_HASH GHASH_MUL AAD AADLEN T1 T2 T3 T4 T5 T6 T7 T8
621 \GHASH_MUL \T8, \T2, \T1, \T3, \T4, \T5, \T6
639 movq (%r10), \T1
642 vpslldq $8, \T1, \T1
644 vpxor \T1, \T7, \T7
650 movq %rax, \T1
653 vpslldq $12, \T1, \T1
655 vpxor \T1, \T7, \T7
662 vmovdqu aad_shift_arr(%r11), \T1
663 vpshufb \T1, \T7, \T7
667 \GHASH_MUL \T7, \T2, \T1, \T3, \T4, \T5, \T6
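
The movq/vpslldq/vpshufb lines above (639-663) handle an AAD length that is not a multiple of 16: the leftover bytes are gathered through T1 into T7 as a zero-padded block and hashed once more at 667. A minimal portable sketch of the same idea, assuming a hypothetical ghash_mul() helper (the byte ordering GHASH needs is taken to live inside it):

#include <stdint.h>
#include <string.h>

/* Sketch only: pack the trailing aad_len % 16 bytes into a zero-padded
 * 16-byte block, then run one more GHASH multiply.  ghash_mul() is an
 * assumed helper, not the kernel API. */
static void ghash_aad_tail(uint8_t hash[16], const uint8_t *tail,
                           size_t tail_len,            /* < 16 */
                           void (*ghash_mul)(uint8_t acc[16],
                                             const uint8_t block[16]))
{
    uint8_t block[16] = { 0 };          /* zero padding of the short block */

    memcpy(block, tail, tail_len);      /* gather the leftover AAD bytes */
    ghash_mul(hash, block);             /* one more multiply by HashKey  */
}
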
895 .macro GHASH_MUL_AVX GH HK T1 T2 T3 T4 T5
902 vpclmulqdq $0x11, \HK, \GH, \T1 # T1 = a1*b1
906 vpxor \T1, \T2, \T2 # T2 = a0*b1+a1*b0
911 vpxor \T2, \T1, \T1 # <T1:GH> = GH x HK
936 vpxor \T1, \GH, \GH # the result is in GH
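
The comments at 902-911 describe a Karatsuba-style carry-less multiply: one product for a1*b1, one for a0*b0 (not in this listing because its destination is GH, not T1), and the middle term a0*b1 ^ a1*b0 recovered from (a1^a0)*(b1^b0). A sketch of that 128x128 -> 256-bit multiply with PCLMULQDQ intrinsics; the reduction modulo x^128 + x^7 + x^2 + x + 1 performed by the lines omitted here is not shown:

#include <immintrin.h>   /* PCLMULQDQ/SSE intrinsics; build with -mpclmul */

/* 128x128-bit carry-less multiply via Karatsuba: three VPCLMULQDQ
 * instead of four.  Returns the unreduced 256-bit product in hi:lo. */
static void clmul_karatsuba_256(__m128i a, __m128i b,
                                __m128i *hi, __m128i *lo)
{
    __m128i h  = _mm_clmulepi64_si128(a, b, 0x11);             /* a1*b1 */
    __m128i l  = _mm_clmulepi64_si128(a, b, 0x00);             /* a0*b0 */
    __m128i as = _mm_xor_si128(a, _mm_shuffle_epi32(a, 0x4e)); /* a1^a0 in both lanes */
    __m128i bs = _mm_xor_si128(b, _mm_shuffle_epi32(b, 0x4e)); /* b1^b0 in both lanes */
    __m128i m  = _mm_clmulepi64_si128(as, bs, 0x00);           /* (a1^a0)*(b1^b0)     */

    m   = _mm_xor_si128(m, _mm_xor_si128(h, l));  /* middle term: a0*b1 ^ a1*b0 */
    *lo = _mm_xor_si128(l, _mm_slli_si128(m, 8)); /* fold low half of middle     */
    *hi = _mm_xor_si128(h, _mm_srli_si128(m, 8)); /* fold high half of middle    */
}

PRECOMPUTE_AVX below stores the (b1^b0) values per hash-key power (the HashKey_i_k slots), so the shuffle/xor on the key side is not redone for every block.
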
941 .macro PRECOMPUTE_AVX HK T1 T2 T3 T4 T5 T6
946 vpshufd $0b01001110, \T5, \T1
947 vpxor \T5, \T1, \T1
948 vmovdqu \T1, HashKey_k(arg2)
950 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
952 vpshufd $0b01001110, \T5, \T1
953 vpxor \T5, \T1, \T1
954 vmovdqu \T1, HashKey_2_k(arg2)
956 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
958 vpshufd $0b01001110, \T5, \T1
959 vpxor \T5, \T1, \T1
960 vmovdqu \T1, HashKey_3_k(arg2)
962 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
964 vpshufd $0b01001110, \T5, \T1
965 vpxor \T5, \T1, \T1
966 vmovdqu \T1, HashKey_4_k(arg2)
968 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
970 vpshufd $0b01001110, \T5, \T1
971 vpxor \T5, \T1, \T1
972 vmovdqu \T1, HashKey_5_k(arg2)
974 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
976 vpshufd $0b01001110, \T5, \T1
977 vpxor \T5, \T1, \T1
978 vmovdqu \T1, HashKey_6_k(arg2)
980 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
982 vpshufd $0b01001110, \T5, \T1
983 vpxor \T5, \T1, \T1
984 vmovdqu \T1, HashKey_7_k(arg2)
986 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
988 vpshufd $0b01001110, \T5, \T1
989 vpxor \T5, \T1, \T1
990 vmovdqu \T1, HashKey_8_k(arg2)
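
Each vpshufd $0b01001110 / vpxor / vmovdqu triple above stores, alongside HashKey^i, the xor of its two 64-bit halves (HashKey_i_k), i.e. the precomputed Karatsuba middle-product operand. A sketch of the same table build, assuming an illustrative struct layout and a ghash_mul() helper for the reduced multiply (neither is the kernel's actual context layout or API):

#include <immintrin.h>

struct gcm_hkeys {                 /* illustrative layout, not the kernel's */
    __m128i hkey[8];               /* HashKey^1 .. HashKey^8                */
    __m128i hkey_k[8];             /* per-power hi^lo helper (HashKey_i_k)  */
};

static void precompute_hkeys(struct gcm_hkeys *t, __m128i hk,
                             __m128i (*ghash_mul)(__m128i, __m128i))
{
    __m128i p = hk;

    for (int i = 0; i < 8; i++) {
        t->hkey[i]   = p;
        /* like the vpshufd/vpxor pair: both 64-bit lanes end up holding
         * hi64(p) ^ lo64(p); pclmulqdq later reads only one lane */
        t->hkey_k[i] = _mm_xor_si128(p, _mm_shuffle_epi32(p, 0x4e));
        if (i < 7)
            p = ghash_mul(p, hk);  /* next power of HashKey */
    }
}
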
1001 .macro INITIAL_BLOCKS_AVX REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 X…
1056 vmovdqu (arg4, %r11), \T1
1057 vpxor \T1, reg_i, reg_i
1061 vmovdqa \T1, reg_i
1075 … GHASH_MUL_AVX reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks blocks
1158 vmovdqu (arg4, %r11), \T1
1159 vpxor \T1, \XMM1, \XMM1
1162 vmovdqa \T1, \XMM1
1165 vmovdqu 16*1(arg4, %r11), \T1
1166 vpxor \T1, \XMM2, \XMM2
1169 vmovdqa \T1, \XMM2
1172 vmovdqu 16*2(arg4, %r11), \T1
1173 vpxor \T1, \XMM3, \XMM3
1176 vmovdqa \T1, \XMM3
1179 vmovdqu 16*3(arg4, %r11), \T1
1180 vpxor \T1, \XMM4, \XMM4
1183 vmovdqa \T1, \XMM4
1186 vmovdqu 16*4(arg4, %r11), \T1
1187 vpxor \T1, \XMM5, \XMM5
1190 vmovdqa \T1, \XMM5
1193 vmovdqu 16*5(arg4, %r11), \T1
1194 vpxor \T1, \XMM6, \XMM6
1197 vmovdqa \T1, \XMM6
1200 vmovdqu 16*6(arg4, %r11), \T1
1201 vpxor \T1, \XMM7, \XMM7
1204 vmovdqa \T1, \XMM7
1207 vmovdqu 16*7(arg4, %r11), \T1
1208 vpxor \T1, \XMM8, \XMM8
1211 vmovdqa \T1, \XMM8
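
In each vmovdqu / vpxor / vmovdqa triple above, T1 holds one block of input text: it is xored into the encrypted counter sitting in XMMi to produce the output block, and on what appears to be the decrypt path (the conditional vmovdqa \T1, \XMMi) the loaded ciphertext replaces XMMi so that GHASH always absorbs ciphertext. A per-block sketch under those assumptions; the pointer names and the enc flag are illustrative:

#include <stdint.h>
#include <immintrin.h>

/* One CTR-mode block: out = in ^ E_K(counter).  The returned value is
 * what GHASH should absorb, which is the ciphertext in both directions
 * (the freshly produced block when encrypting, the loaded block when
 * decrypting). */
static __m128i gcm_ctr_block(__m128i keystream, const uint8_t *in,
                             uint8_t *out, int enc)
{
    __m128i t1 = _mm_loadu_si128((const __m128i *)in);  /* input block */
    __m128i o  = _mm_xor_si128(t1, keystream);          /* CTR xor     */

    _mm_storeu_si128((__m128i *)out, o);
    return enc ? o : t1;     /* ciphertext goes on to GHASH */
}
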
1236 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 …
1281 vmovdqu (arg1), \T1
1282 vpxor \T1, \XMM1, \XMM1
1283 vpxor \T1, \XMM2, \XMM2
1284 vpxor \T1, \XMM3, \XMM3
1285 vpxor \T1, \XMM4, \XMM4
1286 vpxor \T1, \XMM5, \XMM5
1287 vpxor \T1, \XMM6, \XMM6
1288 vpxor \T1, \XMM7, \XMM7
1289 vpxor \T1, \XMM8, \XMM8
1297 vmovdqu 16*1(arg1), \T1
1298 vaesenc \T1, \XMM1, \XMM1
1299 vaesenc \T1, \XMM2, \XMM2
1300 vaesenc \T1, \XMM3, \XMM3
1301 vaesenc \T1, \XMM4, \XMM4
1302 vaesenc \T1, \XMM5, \XMM5
1303 vaesenc \T1, \XMM6, \XMM6
1304 vaesenc \T1, \XMM7, \XMM7
1305 vaesenc \T1, \XMM8, \XMM8
1307 vmovdqu 16*2(arg1), \T1
1308 vaesenc \T1, \XMM1, \XMM1
1309 vaesenc \T1, \XMM2, \XMM2
1310 vaesenc \T1, \XMM3, \XMM3
1311 vaesenc \T1, \XMM4, \XMM4
1312 vaesenc \T1, \XMM5, \XMM5
1313 vaesenc \T1, \XMM6, \XMM6
1314 vaesenc \T1, \XMM7, \XMM7
1315 vaesenc \T1, \XMM8, \XMM8
1330 vmovdqu 16*3(arg1), \T1
1331 vaesenc \T1, \XMM1, \XMM1
1332 vaesenc \T1, \XMM2, \XMM2
1333 vaesenc \T1, \XMM3, \XMM3
1334 vaesenc \T1, \XMM4, \XMM4
1335 vaesenc \T1, \XMM5, \XMM5
1336 vaesenc \T1, \XMM6, \XMM6
1337 vaesenc \T1, \XMM7, \XMM7
1338 vaesenc \T1, \XMM8, \XMM8
1340 vmovdqa TMP2(%rsp), \T1
1342 vpclmulqdq $0x11, \T5, \T1, \T3
1344 vpclmulqdq $0x00, \T5, \T1, \T3
1347 vpshufd $0b01001110, \T1, \T3
1348 vpxor \T1, \T3, \T3
1353 vmovdqu 16*4(arg1), \T1
1354 vaesenc \T1, \XMM1, \XMM1
1355 vaesenc \T1, \XMM2, \XMM2
1356 vaesenc \T1, \XMM3, \XMM3
1357 vaesenc \T1, \XMM4, \XMM4
1358 vaesenc \T1, \XMM5, \XMM5
1359 vaesenc \T1, \XMM6, \XMM6
1360 vaesenc \T1, \XMM7, \XMM7
1361 vaesenc \T1, \XMM8, \XMM8
1365 vmovdqa TMP3(%rsp), \T1
1367 vpclmulqdq $0x11, \T5, \T1, \T3
1369 vpclmulqdq $0x00, \T5, \T1, \T3
1372 vpshufd $0b01001110, \T1, \T3
1373 vpxor \T1, \T3, \T3
1378 vmovdqu 16*5(arg1), \T1
1379 vaesenc \T1, \XMM1, \XMM1
1380 vaesenc \T1, \XMM2, \XMM2
1381 vaesenc \T1, \XMM3, \XMM3
1382 vaesenc \T1, \XMM4, \XMM4
1383 vaesenc \T1, \XMM5, \XMM5
1384 vaesenc \T1, \XMM6, \XMM6
1385 vaesenc \T1, \XMM7, \XMM7
1386 vaesenc \T1, \XMM8, \XMM8
1388 vmovdqa TMP4(%rsp), \T1
1390 vpclmulqdq $0x11, \T5, \T1, \T3
1392 vpclmulqdq $0x00, \T5, \T1, \T3
1395 vpshufd $0b01001110, \T1, \T3
1396 vpxor \T1, \T3, \T3
1401 vmovdqu 16*6(arg1), \T1
1402 vaesenc \T1, \XMM1, \XMM1
1403 vaesenc \T1, \XMM2, \XMM2
1404 vaesenc \T1, \XMM3, \XMM3
1405 vaesenc \T1, \XMM4, \XMM4
1406 vaesenc \T1, \XMM5, \XMM5
1407 vaesenc \T1, \XMM6, \XMM6
1408 vaesenc \T1, \XMM7, \XMM7
1409 vaesenc \T1, \XMM8, \XMM8
1412 vmovdqa TMP5(%rsp), \T1
1414 vpclmulqdq $0x11, \T5, \T1, \T3
1416 vpclmulqdq $0x00, \T5, \T1, \T3
1419 vpshufd $0b01001110, \T1, \T3
1420 vpxor \T1, \T3, \T3
1425 vmovdqu 16*7(arg1), \T1
1426 vaesenc \T1, \XMM1, \XMM1
1427 vaesenc \T1, \XMM2, \XMM2
1428 vaesenc \T1, \XMM3, \XMM3
1429 vaesenc \T1, \XMM4, \XMM4
1430 vaesenc \T1, \XMM5, \XMM5
1431 vaesenc \T1, \XMM6, \XMM6
1432 vaesenc \T1, \XMM7, \XMM7
1433 vaesenc \T1, \XMM8, \XMM8
1435 vmovdqa TMP6(%rsp), \T1
1437 vpclmulqdq $0x11, \T5, \T1, \T3
1439 vpclmulqdq $0x00, \T5, \T1, \T3
1442 vpshufd $0b01001110, \T1, \T3
1443 vpxor \T1, \T3, \T3
1449 vmovdqu 16*8(arg1), \T1
1450 vaesenc \T1, \XMM1, \XMM1
1451 vaesenc \T1, \XMM2, \XMM2
1452 vaesenc \T1, \XMM3, \XMM3
1453 vaesenc \T1, \XMM4, \XMM4
1454 vaesenc \T1, \XMM5, \XMM5
1455 vaesenc \T1, \XMM6, \XMM6
1456 vaesenc \T1, \XMM7, \XMM7
1457 vaesenc \T1, \XMM8, \XMM8
1459 vmovdqa TMP7(%rsp), \T1
1461 vpclmulqdq $0x11, \T5, \T1, \T3
1463 vpclmulqdq $0x00, \T5, \T1, \T3
1466 vpshufd $0b01001110, \T1, \T3
1467 vpxor \T1, \T3, \T3
1484 vmovdqa TMP8(%rsp), \T1
1486 vpclmulqdq $0x11, \T5, \T1, \T3
1488 vpclmulqdq $0x00, \T5, \T1, \T3
1491 vpshufd $0b01001110, \T1, \T3
1492 vpxor \T1, \T3, \T3
1556 vpsrldq $4, \T2, \T1 # shift-R T2 1 DW (into T1)
1580 vpxor \T1, \T2, \T2
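
The pattern above is the core of the macro: each round key loaded into T1 (16*1(arg1) through 16*8(arg1) and beyond) is applied to all eight counter blocks, and between rounds one ciphertext block stashed in TMP2..TMP8 is multiplied by the matching HashKey power and xor-accumulated. A compressed sketch of that interleave, with illustrative names (round_keys, stash, hkeys) and only the high/low partial products kept; the Karatsuba middle term and the reduction at 1556-1580 are omitted:

#include <immintrin.h>   /* AES-NI + PCLMUL intrinsics: -maes -mpclmul */

/* Sketch of the 8-block AES / GHASH interleave, not the kernel routine. */
static void aes8_ghash8_interleaved(__m128i blk[8],
                                    const __m128i *round_keys, int nrounds,
                                    const __m128i stash[8],
                                    const __m128i hkeys[8],
                                    __m128i *acc_hi, __m128i *acc_lo)
{
    int i, r;

    for (i = 0; i < 8; i++)                       /* round 0: AddRoundKey */
        blk[i] = _mm_xor_si128(blk[i], round_keys[0]);

    for (r = 1; r < nrounds; r++) {
        for (i = 0; i < 8; i++)                   /* one AES round, 8 blocks */
            blk[i] = _mm_aesenc_si128(blk[i], round_keys[r]);
        if (r <= 8) {                             /* fold one stashed block */
            __m128i a = stash[r - 1], h = hkeys[r - 1];
            *acc_hi = _mm_xor_si128(*acc_hi,
                          _mm_clmulepi64_si128(a, h, 0x11));
            *acc_lo = _mm_xor_si128(*acc_lo,
                          _mm_clmulepi64_si128(a, h, 0x00));
            /* middle products omitted for brevity */
        }
    }
    for (i = 0; i < 8; i++)                       /* final round */
        blk[i] = _mm_aesenclast_si128(blk[i], round_keys[nrounds]);
}
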
1603 .macro GHASH_LAST_8_AVX T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
1744 vpsrldq $4, \T2, \T1 # shift-R T2 1 DW (into T1)
1758 vpxor \T1, \T2, \T2
1877 .macro GHASH_MUL_AVX2 GH HK T1 T2 T3 T4 T5
1879 vpclmulqdq $0x11, \HK, \GH, \T1 # T1 = a1*b1
1889 vpxor \T3, \T1, \T1
1910 vpxor \T1, \GH, \GH # the result is in GH
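
Unlike GHASH_MUL_AVX, the AVX2 variants compute all four 64x64 partial products directly (the $0x00/$0x01/$0x10/$0x11 immediates visible in the parallel macro below) rather than using the Karatsuba middle-term trick. A sketch of that schoolbook form; as before, the reduction step is left out:

#include <immintrin.h>

/* Schoolbook 128x128 carry-less multiply: four VPCLMULQDQ products,
 * no Karatsuba.  Returns the unreduced 256-bit product in hi:lo. */
static void clmul_schoolbook_256(__m128i a, __m128i b,
                                 __m128i *hi, __m128i *lo)
{
    __m128i h = _mm_clmulepi64_si128(a, b, 0x11);   /* a1*b1           */
    __m128i l = _mm_clmulepi64_si128(a, b, 0x00);   /* a0*b0           */
    __m128i m = _mm_xor_si128(                      /* a1*b0 ^ a0*b1   */
                    _mm_clmulepi64_si128(a, b, 0x01),
                    _mm_clmulepi64_si128(a, b, 0x10));

    *lo = _mm_xor_si128(l, _mm_slli_si128(m, 8));   /* fold middle low  */
    *hi = _mm_xor_si128(h, _mm_srli_si128(m, 8));   /* fold middle high */
}
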
1915 .macro PRECOMPUTE_AVX2 HK T1 T2 T3 T4 T5 T6
1919 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
1922 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
1925 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
1928 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
1931 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
1934 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
1937 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
1949 .macro INITIAL_BLOCKS_AVX2 REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 …
2005 vmovdqu (arg4, %r11), \T1
2006 vpxor \T1, reg_i, reg_i
2011 vmovdqa \T1, reg_i
2025 …GHASH_MUL_AVX2 reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks bloc…
2109 vmovdqu (arg4, %r11), \T1
2110 vpxor \T1, \XMM1, \XMM1
2113 vmovdqa \T1, \XMM1
2116 vmovdqu 16*1(arg4, %r11), \T1
2117 vpxor \T1, \XMM2, \XMM2
2120 vmovdqa \T1, \XMM2
2123 vmovdqu 16*2(arg4, %r11), \T1
2124 vpxor \T1, \XMM3, \XMM3
2127 vmovdqa \T1, \XMM3
2130 vmovdqu 16*3(arg4, %r11), \T1
2131 vpxor \T1, \XMM4, \XMM4
2134 vmovdqa \T1, \XMM4
2137 vmovdqu 16*4(arg4, %r11), \T1
2138 vpxor \T1, \XMM5, \XMM5
2141 vmovdqa \T1, \XMM5
2144 vmovdqu 16*5(arg4, %r11), \T1
2145 vpxor \T1, \XMM6, \XMM6
2148 vmovdqa \T1, \XMM6
2151 vmovdqu 16*6(arg4, %r11), \T1
2152 vpxor \T1, \XMM7, \XMM7
2155 vmovdqa \T1, \XMM7
2158 vmovdqu 16*7(arg4, %r11), \T1
2159 vpxor \T1, \XMM8, \XMM8
2162 vmovdqa \T1, \XMM8
2191 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX2 REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7…
2236 vmovdqu (arg1), \T1
2237 vpxor \T1, \XMM1, \XMM1
2238 vpxor \T1, \XMM2, \XMM2
2239 vpxor \T1, \XMM3, \XMM3
2240 vpxor \T1, \XMM4, \XMM4
2241 vpxor \T1, \XMM5, \XMM5
2242 vpxor \T1, \XMM6, \XMM6
2243 vpxor \T1, \XMM7, \XMM7
2244 vpxor \T1, \XMM8, \XMM8
2252 vmovdqu 16*1(arg1), \T1
2253 vaesenc \T1, \XMM1, \XMM1
2254 vaesenc \T1, \XMM2, \XMM2
2255 vaesenc \T1, \XMM3, \XMM3
2256 vaesenc \T1, \XMM4, \XMM4
2257 vaesenc \T1, \XMM5, \XMM5
2258 vaesenc \T1, \XMM6, \XMM6
2259 vaesenc \T1, \XMM7, \XMM7
2260 vaesenc \T1, \XMM8, \XMM8
2262 vmovdqu 16*2(arg1), \T1
2263 vaesenc \T1, \XMM1, \XMM1
2264 vaesenc \T1, \XMM2, \XMM2
2265 vaesenc \T1, \XMM3, \XMM3
2266 vaesenc \T1, \XMM4, \XMM4
2267 vaesenc \T1, \XMM5, \XMM5
2268 vaesenc \T1, \XMM6, \XMM6
2269 vaesenc \T1, \XMM7, \XMM7
2270 vaesenc \T1, \XMM8, \XMM8
2282 vmovdqu 16*3(arg1), \T1
2283 vaesenc \T1, \XMM1, \XMM1
2284 vaesenc \T1, \XMM2, \XMM2
2285 vaesenc \T1, \XMM3, \XMM3
2286 vaesenc \T1, \XMM4, \XMM4
2287 vaesenc \T1, \XMM5, \XMM5
2288 vaesenc \T1, \XMM6, \XMM6
2289 vaesenc \T1, \XMM7, \XMM7
2290 vaesenc \T1, \XMM8, \XMM8
2292 vmovdqa TMP2(%rsp), \T1
2294 vpclmulqdq $0x11, \T5, \T1, \T3
2297 vpclmulqdq $0x00, \T5, \T1, \T3
2300 vpclmulqdq $0x01, \T5, \T1, \T3
2303 vpclmulqdq $0x10, \T5, \T1, \T3
2306 vmovdqu 16*4(arg1), \T1
2307 vaesenc \T1, \XMM1, \XMM1
2308 vaesenc \T1, \XMM2, \XMM2
2309 vaesenc \T1, \XMM3, \XMM3
2310 vaesenc \T1, \XMM4, \XMM4
2311 vaesenc \T1, \XMM5, \XMM5
2312 vaesenc \T1, \XMM6, \XMM6
2313 vaesenc \T1, \XMM7, \XMM7
2314 vaesenc \T1, \XMM8, \XMM8
2318 vmovdqa TMP3(%rsp), \T1
2320 vpclmulqdq $0x11, \T5, \T1, \T3
2323 vpclmulqdq $0x00, \T5, \T1, \T3
2326 vpclmulqdq $0x01, \T5, \T1, \T3
2329 vpclmulqdq $0x10, \T5, \T1, \T3
2332 vmovdqu 16*5(arg1), \T1
2333 vaesenc \T1, \XMM1, \XMM1
2334 vaesenc \T1, \XMM2, \XMM2
2335 vaesenc \T1, \XMM3, \XMM3
2336 vaesenc \T1, \XMM4, \XMM4
2337 vaesenc \T1, \XMM5, \XMM5
2338 vaesenc \T1, \XMM6, \XMM6
2339 vaesenc \T1, \XMM7, \XMM7
2340 vaesenc \T1, \XMM8, \XMM8
2342 vmovdqa TMP4(%rsp), \T1
2344 vpclmulqdq $0x11, \T5, \T1, \T3
2347 vpclmulqdq $0x00, \T5, \T1, \T3
2350 vpclmulqdq $0x01, \T5, \T1, \T3
2353 vpclmulqdq $0x10, \T5, \T1, \T3
2356 vmovdqu 16*6(arg1), \T1
2357 vaesenc \T1, \XMM1, \XMM1
2358 vaesenc \T1, \XMM2, \XMM2
2359 vaesenc \T1, \XMM3, \XMM3
2360 vaesenc \T1, \XMM4, \XMM4
2361 vaesenc \T1, \XMM5, \XMM5
2362 vaesenc \T1, \XMM6, \XMM6
2363 vaesenc \T1, \XMM7, \XMM7
2364 vaesenc \T1, \XMM8, \XMM8
2367 vmovdqa TMP5(%rsp), \T1
2369 vpclmulqdq $0x11, \T5, \T1, \T3
2372 vpclmulqdq $0x00, \T5, \T1, \T3
2375 vpclmulqdq $0x01, \T5, \T1, \T3
2378 vpclmulqdq $0x10, \T5, \T1, \T3
2381 vmovdqu 16*7(arg1), \T1
2382 vaesenc \T1, \XMM1, \XMM1
2383 vaesenc \T1, \XMM2, \XMM2
2384 vaesenc \T1, \XMM3, \XMM3
2385 vaesenc \T1, \XMM4, \XMM4
2386 vaesenc \T1, \XMM5, \XMM5
2387 vaesenc \T1, \XMM6, \XMM6
2388 vaesenc \T1, \XMM7, \XMM7
2389 vaesenc \T1, \XMM8, \XMM8
2391 vmovdqa TMP6(%rsp), \T1
2393 vpclmulqdq $0x11, \T5, \T1, \T3
2396 vpclmulqdq $0x00, \T5, \T1, \T3
2399 vpclmulqdq $0x01, \T5, \T1, \T3
2402 vpclmulqdq $0x10, \T5, \T1, \T3
2405 vmovdqu 16*8(arg1), \T1
2406 vaesenc \T1, \XMM1, \XMM1
2407 vaesenc \T1, \XMM2, \XMM2
2408 vaesenc \T1, \XMM3, \XMM3
2409 vaesenc \T1, \XMM4, \XMM4
2410 vaesenc \T1, \XMM5, \XMM5
2411 vaesenc \T1, \XMM6, \XMM6
2412 vaesenc \T1, \XMM7, \XMM7
2413 vaesenc \T1, \XMM8, \XMM8
2415 vmovdqa TMP7(%rsp), \T1
2417 vpclmulqdq $0x11, \T5, \T1, \T3
2420 vpclmulqdq $0x00, \T5, \T1, \T3
2423 vpclmulqdq $0x01, \T5, \T1, \T3
2426 vpclmulqdq $0x10, \T5, \T1, \T3
2442 vmovdqa TMP8(%rsp), \T1
2445 vpclmulqdq $0x00, \T5, \T1, \T3
2448 vpclmulqdq $0x01, \T5, \T1, \T3
2451 vpclmulqdq $0x10, \T5, \T1, \T3
2454 vpclmulqdq $0x11, \T5, \T1, \T3
2455 vpxor \T3, \T4, \T1
2499 vpxor \T6, \T1, \T1 # accumulate the results in T1:T7
2533 vpxor \T4, \T1, \T1 # the result is in T1
2545 vpxor \T1, \XMM1, \XMM1
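
Line 2545 xors the reduced GHASH result (left in T1 by 2533) into XMM1, so the next GHASH pass absorbs the running hash along with the first ciphertext block; that is the usual recurrence Y = (Y ^ C1)*H^8 ^ C2*H^7 ^ ... ^ C8*H threaded through the 8-block pipeline. A sketch of that chaining step, with ghash_8_blocks() standing in for the macro's multiply/accumulate/reduce work (an assumed helper, not the kernel API):

#include <immintrin.h>

/* Fold the running hash into the first block, then hash the batch of
 * eight ciphertext blocks against descending HashKey powers H^8..H^1. */
static __m128i ghash_batch_chain(__m128i y, const __m128i c[8],
                                 const __m128i hpow[8],
                                 __m128i (*ghash_8_blocks)(const __m128i *,
                                                           const __m128i *))
{
    __m128i blk[8];

    for (int i = 0; i < 8; i++)
        blk[i] = c[i];
    blk[0] = _mm_xor_si128(blk[0], y);   /* Y ^ C1, as at line 2545        */
    return ghash_8_blocks(blk, hpow);    /* sum of blk[i]*H^(8-i), reduced */
}
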
2553 .macro GHASH_LAST_8_AVX2 T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8