Lines Matching refs:T1

595 .macro CALC_AAD_HASH GHASH_MUL AAD AADLEN T1 T2 T3 T4 T5 T6 T7 T8
611 \GHASH_MUL \T8, \T2, \T1, \T3, \T4, \T5, \T6
629 movq (%r10), \T1
632 vpslldq $8, \T1, \T1
634 vpxor \T1, \T7, \T7
640 movq %rax, \T1
643 vpslldq $12, \T1, \T1
645 vpxor \T1, \T7, \T7
652 vmovdqu aad_shift_arr(%r11), \T1
653 vpshufb \T1, \T7, \T7
657 \GHASH_MUL \T7, \T2, \T1, \T3, \T4, \T5, \T6
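
Note: the matches in this listing appear to come from the Linux kernel's AVX/AVX2 AES-GCM assembly (likely arch/x86/crypto/aesni-intel_avx-x86_64.S), where \T1 is used throughout as a scratch xmm register. As a rough guide to the CALC_AAD_HASH pattern above (hash full 16-byte AAD blocks, then assemble and zero-pad the final partial block before one more GHASH multiply), here is a minimal C-intrinsics sketch. It assumes a hypothetical ghash_mul() helper (a sketch of the multiply follows the GHASH_MUL_AVX matches below) and omits the byte-order shuffles the real macro performs:

    /* Hedged sketch only, not the kernel's code.  ghash_mul() is a
     * hypothetical helper; byte-reflection of the AAD is omitted. */
    #include <immintrin.h>
    #include <stdint.h>
    #include <string.h>

    __m128i ghash_mul(__m128i ghash, __m128i hkey);   /* hypothetical */

    static __m128i calc_aad_hash(const uint8_t *aad, size_t aadlen,
                                 __m128i hkey)
    {
        __m128i t7 = _mm_setzero_si128();       /* running hash (\T7)   */

        while (aadlen >= 16) {                  /* full 16-byte blocks  */
            __m128i blk = _mm_loadu_si128((const __m128i *)aad);
            t7 = ghash_mul(_mm_xor_si128(t7, blk), hkey);
            aad += 16;
            aadlen -= 16;
        }

        if (aadlen) {                           /* final partial block: */
            uint8_t buf[16] = { 0 };            /* zero-pad, as the     */
            memcpy(buf, aad, aadlen);           /* vpslldq/vpshufb code */
            __m128i blk = _mm_loadu_si128((const __m128i *)buf);
            t7 = ghash_mul(_mm_xor_si128(t7, blk), hkey);
        }
        return t7;
    }
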
885 .macro GHASH_MUL_AVX GH HK T1 T2 T3 T4 T5
892 vpclmulqdq $0x11, \HK, \GH, \T1 # T1 = a1*b1
896 vpxor \T1, \T2,\T2 # T2 = a0*b1+a1*b0
901 vpxor \T2, \T1, \T1 # <T1:GH> = GH x HK
926 vpxor \T1, \GH, \GH # the result is in GH
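
The comments on the GHASH_MUL_AVX matches (T1 = a1*b1, T2 = a0*b1+a1*b0) describe a Karatsuba carry-less multiply: three VPCLMULQDQs plus the vpshufd/vpxor half-swap, followed by a shift-based reduction. A minimal C-intrinsics sketch of the three-multiply step, with the modular reduction deliberately left out:

    /* Hedged sketch of the Karatsuba step: 3 carry-less multiplies give
     * the 256-bit product <hi:lo> of two 128-bit operands.  The
     * reduction (the macro's shift/xor sequence) is not shown. */
    #include <immintrin.h>

    static void clmul_karatsuba_256(__m128i a, __m128i b,
                                    __m128i *hi, __m128i *lo)
    {
        __m128i h   = _mm_clmulepi64_si128(a, b, 0x11);  /* a1*b1 */
        __m128i l   = _mm_clmulepi64_si128(a, b, 0x00);  /* a0*b0 */

        /* vpshufd $0b01001110 swaps the 64-bit halves, so a^swap(a)
         * holds (a1^a0) in both halves; likewise for b.              */
        __m128i ax  = _mm_xor_si128(a, _mm_shuffle_epi32(a, 0x4E));
        __m128i bx  = _mm_xor_si128(b, _mm_shuffle_epi32(b, 0x4E));

        __m128i mid = _mm_clmulepi64_si128(ax, bx, 0x00); /* (a1^a0)*(b1^b0) */
        mid = _mm_xor_si128(mid, _mm_xor_si128(h, l));    /* a0*b1 ^ a1*b0   */

        /* fold the middle term into the 256-bit result <hi:lo> */
        *lo = _mm_xor_si128(l, _mm_slli_si128(mid, 8));
        *hi = _mm_xor_si128(h, _mm_srli_si128(mid, 8));
    }
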
931 .macro PRECOMPUTE_AVX HK T1 T2 T3 T4 T5 T6
936 vpshufd $0b01001110, \T5, \T1
937 vpxor \T5, \T1, \T1
938 vmovdqu \T1, HashKey_k(arg2)
940 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
942 vpshufd $0b01001110, \T5, \T1
943 vpxor \T5, \T1, \T1
944 vmovdqu \T1, HashKey_2_k(arg2)
946 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
948 vpshufd $0b01001110, \T5, \T1
949 vpxor \T5, \T1, \T1
950 vmovdqu \T1, HashKey_3_k(arg2)
952 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
954 vpshufd $0b01001110, \T5, \T1
955 vpxor \T5, \T1, \T1
956 vmovdqu \T1, HashKey_4_k(arg2)
958 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
960 vpshufd $0b01001110, \T5, \T1
961 vpxor \T5, \T1, \T1
962 vmovdqu \T1, HashKey_5_k(arg2)
964 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
966 vpshufd $0b01001110, \T5, \T1
967 vpxor \T5, \T1, \T1
968 vmovdqu \T1, HashKey_6_k(arg2)
970 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
972 vpshufd $0b01001110, \T5, \T1
973 vpxor \T5, \T1, \T1
974 vmovdqu \T1, HashKey_7_k(arg2)
976 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
978 vpshufd $0b01001110, \T5, \T1
979 vpxor \T5, \T1, \T1
980 vmovdqu \T1, HashKey_8_k(arg2)
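
The PRECOMPUTE_AVX matches show the same three-line pattern repeated for each power of the hash key: multiply the running power by HashKey, then store the XOR of its two 64-bit halves (the vpshufd/vpxor pair) as HashKey_N_k for later Karatsuba multiplies. A short sketch of that loop, assuming the hypothetical ghash_mul() above and an illustrative (not the kernel's) key-context layout:

    /* Hedged sketch of the precompute pattern; struct/field names are
     * illustrative only. */
    #include <immintrin.h>

    __m128i ghash_mul(__m128i a, __m128i b);       /* hypothetical */

    struct gcm_keys {                              /* illustrative */
        __m128i hkey[8];                           /* H^1 .. H^8   */
        __m128i hkey_k[8];                         /* hi^lo of each power */
    };

    static void precompute(struct gcm_keys *k, __m128i hkey)
    {
        __m128i t5 = hkey;
        for (int i = 0; i < 8; i++) {
            if (i)                                 /* T5 = HashKey^(i+1) */
                t5 = ghash_mul(t5, hkey);
            k->hkey[i]   = t5;
            /* vpshufd $0b01001110 + vpxor: store (high half ^ low half) */
            k->hkey_k[i] = _mm_xor_si128(t5, _mm_shuffle_epi32(t5, 0x4E));
        }
    }
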
991 .macro INITIAL_BLOCKS_AVX REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 X…
1046 vmovdqu (arg4, %r11), \T1
1047 vpxor \T1, reg_i, reg_i
1051 vmovdqa \T1, reg_i
1065 … GHASH_MUL_AVX reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks blocks
1148 vmovdqu (arg4, %r11), \T1
1149 vpxor \T1, \XMM1, \XMM1
1152 vmovdqa \T1, \XMM1
1155 vmovdqu 16*1(arg4, %r11), \T1
1156 vpxor \T1, \XMM2, \XMM2
1159 vmovdqa \T1, \XMM2
1162 vmovdqu 16*2(arg4, %r11), \T1
1163 vpxor \T1, \XMM3, \XMM3
1166 vmovdqa \T1, \XMM3
1169 vmovdqu 16*3(arg4, %r11), \T1
1170 vpxor \T1, \XMM4, \XMM4
1173 vmovdqa \T1, \XMM4
1176 vmovdqu 16*4(arg4, %r11), \T1
1177 vpxor \T1, \XMM5, \XMM5
1180 vmovdqa \T1, \XMM5
1183 vmovdqu 16*5(arg4, %r11), \T1
1184 vpxor \T1, \XMM6, \XMM6
1187 vmovdqa \T1, \XMM6
1190 vmovdqu 16*6(arg4, %r11), \T1
1191 vpxor \T1, \XMM7, \XMM7
1194 vmovdqa \T1, \XMM7
1197 vmovdqu 16*7(arg4, %r11), \T1
1198 vpxor \T1, \XMM8, \XMM8
1201 vmovdqa \T1, \XMM8
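
In the INITIAL_BLOCKS_AVX matches, each \T1 load is an input text block: it is XORed into the AES-CTR keystream block (vpxor \T1, \XMMi), and the trailing vmovdqa \T1, \XMMi lines are the decrypt path, which puts the original ciphertext back into XMMi so that GHASH is always computed over ciphertext. A minimal sketch of that choice, with illustrative names:

    /* Hedged sketch of the ENC/DEC handling; helper names are illustrative. */
    #include <immintrin.h>
    #include <stdbool.h>

    static __m128i ctr_xcrypt_block(__m128i keystream,   /* AES(counter)     */
                                    __m128i in,           /* \T1: input text  */
                                    bool encrypt,
                                    __m128i *ghash_in)    /* block for GHASH  */
    {
        __m128i out = _mm_xor_si128(keystream, in);       /* vpxor \T1, \XMMi */

        /* GHASH always runs over ciphertext: the output when encrypting,
         * the input when decrypting (the vmovdqa \T1, \XMMi on DEC).     */
        *ghash_in = encrypt ? out : in;
        return out;
    }
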
1226 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 …
1271 vmovdqu (arg1), \T1
1272 vpxor \T1, \XMM1, \XMM1
1273 vpxor \T1, \XMM2, \XMM2
1274 vpxor \T1, \XMM3, \XMM3
1275 vpxor \T1, \XMM4, \XMM4
1276 vpxor \T1, \XMM5, \XMM5
1277 vpxor \T1, \XMM6, \XMM6
1278 vpxor \T1, \XMM7, \XMM7
1279 vpxor \T1, \XMM8, \XMM8
1287 vmovdqu 16*1(arg1), \T1
1288 vaesenc \T1, \XMM1, \XMM1
1289 vaesenc \T1, \XMM2, \XMM2
1290 vaesenc \T1, \XMM3, \XMM3
1291 vaesenc \T1, \XMM4, \XMM4
1292 vaesenc \T1, \XMM5, \XMM5
1293 vaesenc \T1, \XMM6, \XMM6
1294 vaesenc \T1, \XMM7, \XMM7
1295 vaesenc \T1, \XMM8, \XMM8
1297 vmovdqu 16*2(arg1), \T1
1298 vaesenc \T1, \XMM1, \XMM1
1299 vaesenc \T1, \XMM2, \XMM2
1300 vaesenc \T1, \XMM3, \XMM3
1301 vaesenc \T1, \XMM4, \XMM4
1302 vaesenc \T1, \XMM5, \XMM5
1303 vaesenc \T1, \XMM6, \XMM6
1304 vaesenc \T1, \XMM7, \XMM7
1305 vaesenc \T1, \XMM8, \XMM8
1320 vmovdqu 16*3(arg1), \T1
1321 vaesenc \T1, \XMM1, \XMM1
1322 vaesenc \T1, \XMM2, \XMM2
1323 vaesenc \T1, \XMM3, \XMM3
1324 vaesenc \T1, \XMM4, \XMM4
1325 vaesenc \T1, \XMM5, \XMM5
1326 vaesenc \T1, \XMM6, \XMM6
1327 vaesenc \T1, \XMM7, \XMM7
1328 vaesenc \T1, \XMM8, \XMM8
1330 vmovdqa TMP2(%rsp), \T1
1332 vpclmulqdq $0x11, \T5, \T1, \T3
1334 vpclmulqdq $0x00, \T5, \T1, \T3
1337 vpshufd $0b01001110, \T1, \T3
1338 vpxor \T1, \T3, \T3
1343 vmovdqu 16*4(arg1), \T1
1344 vaesenc \T1, \XMM1, \XMM1
1345 vaesenc \T1, \XMM2, \XMM2
1346 vaesenc \T1, \XMM3, \XMM3
1347 vaesenc \T1, \XMM4, \XMM4
1348 vaesenc \T1, \XMM5, \XMM5
1349 vaesenc \T1, \XMM6, \XMM6
1350 vaesenc \T1, \XMM7, \XMM7
1351 vaesenc \T1, \XMM8, \XMM8
1355 vmovdqa TMP3(%rsp), \T1
1357 vpclmulqdq $0x11, \T5, \T1, \T3
1359 vpclmulqdq $0x00, \T5, \T1, \T3
1362 vpshufd $0b01001110, \T1, \T3
1363 vpxor \T1, \T3, \T3
1368 vmovdqu 16*5(arg1), \T1
1369 vaesenc \T1, \XMM1, \XMM1
1370 vaesenc \T1, \XMM2, \XMM2
1371 vaesenc \T1, \XMM3, \XMM3
1372 vaesenc \T1, \XMM4, \XMM4
1373 vaesenc \T1, \XMM5, \XMM5
1374 vaesenc \T1, \XMM6, \XMM6
1375 vaesenc \T1, \XMM7, \XMM7
1376 vaesenc \T1, \XMM8, \XMM8
1378 vmovdqa TMP4(%rsp), \T1
1380 vpclmulqdq $0x11, \T5, \T1, \T3
1382 vpclmulqdq $0x00, \T5, \T1, \T3
1385 vpshufd $0b01001110, \T1, \T3
1386 vpxor \T1, \T3, \T3
1391 vmovdqu 16*6(arg1), \T1
1392 vaesenc \T1, \XMM1, \XMM1
1393 vaesenc \T1, \XMM2, \XMM2
1394 vaesenc \T1, \XMM3, \XMM3
1395 vaesenc \T1, \XMM4, \XMM4
1396 vaesenc \T1, \XMM5, \XMM5
1397 vaesenc \T1, \XMM6, \XMM6
1398 vaesenc \T1, \XMM7, \XMM7
1399 vaesenc \T1, \XMM8, \XMM8
1402 vmovdqa TMP5(%rsp), \T1
1404 vpclmulqdq $0x11, \T5, \T1, \T3
1406 vpclmulqdq $0x00, \T5, \T1, \T3
1409 vpshufd $0b01001110, \T1, \T3
1410 vpxor \T1, \T3, \T3
1415 vmovdqu 16*7(arg1), \T1
1416 vaesenc \T1, \XMM1, \XMM1
1417 vaesenc \T1, \XMM2, \XMM2
1418 vaesenc \T1, \XMM3, \XMM3
1419 vaesenc \T1, \XMM4, \XMM4
1420 vaesenc \T1, \XMM5, \XMM5
1421 vaesenc \T1, \XMM6, \XMM6
1422 vaesenc \T1, \XMM7, \XMM7
1423 vaesenc \T1, \XMM8, \XMM8
1425 vmovdqa TMP6(%rsp), \T1
1427 vpclmulqdq $0x11, \T5, \T1, \T3
1429 vpclmulqdq $0x00, \T5, \T1, \T3
1432 vpshufd $0b01001110, \T1, \T3
1433 vpxor \T1, \T3, \T3
1439 vmovdqu 16*8(arg1), \T1
1440 vaesenc \T1, \XMM1, \XMM1
1441 vaesenc \T1, \XMM2, \XMM2
1442 vaesenc \T1, \XMM3, \XMM3
1443 vaesenc \T1, \XMM4, \XMM4
1444 vaesenc \T1, \XMM5, \XMM5
1445 vaesenc \T1, \XMM6, \XMM6
1446 vaesenc \T1, \XMM7, \XMM7
1447 vaesenc \T1, \XMM8, \XMM8
1449 vmovdqa TMP7(%rsp), \T1
1451 vpclmulqdq $0x11, \T5, \T1, \T3
1453 vpclmulqdq $0x00, \T5, \T1, \T3
1456 vpshufd $0b01001110, \T1, \T3
1457 vpxor \T1, \T3, \T3
1474 vmovdqa TMP8(%rsp), \T1
1476 vpclmulqdq $0x11, \T5, \T1, \T3
1478 vpclmulqdq $0x00, \T5, \T1, \T3
1481 vpshufd $0b01001110, \T1, \T3
1482 vpxor \T1, \T3, \T3
1546 vpsrldq $4, \T2, \T1 # shift-R T1 1 DW
1570 vpxor \T1, \T2, \T2
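
The GHASH_8_ENCRYPT_8_PARALLEL_AVX matches show the interleaving that gives the macro its name: one AES round key at a time is loaded from arg1 (offsets 16*1, 16*2, ...) into \T1 and applied to all eight counter blocks, and between rounds one of the previous eight ciphertext blocks (saved at TMP2..TMP8 on the stack) is multiplied by the matching power of HashKey in \T5. A structural sketch, not the macro's exact schedule, with hypothetical helper and accumulator names:

    /* Hedged, structural sketch of the round/GHASH interleave.
     * ghash_partial() and the hi/mid/lo accumulators are illustrative. */
    #include <immintrin.h>

    void ghash_partial(__m128i prev_ct, __m128i hkey_pow,
                       __m128i *hi, __m128i *mid, __m128i *lo); /* hypothetical */

    static void aes8_rounds_with_ghash(__m128i blk[8],           /* 8 CTR blocks */
                                       const __m128i *round_keys, /* arg1        */
                                       int nrounds,               /* 10/12/14    */
                                       const __m128i prev_ct[8],
                                       const __m128i hkey_pow[8], /* H^8 .. H^1  */
                                       __m128i *hi, __m128i *mid, __m128i *lo)
    {
        int folded = 0;
        for (int r = 1; r < nrounds; r++) {
            __m128i rk = round_keys[r];           /* vmovdqu 16*r(arg1), \T1 */
            for (int i = 0; i < 8; i++)
                blk[i] = _mm_aesenc_si128(blk[i], rk);
            /* between AES rounds, fold one previous ciphertext block
             * into the GHASH partial products */
            if (folded < 8)
                ghash_partial(prev_ct[folded], hkey_pow[folded], hi, mid, lo);
            folded++;
        }
        /* the final round (vaesenclast) and the reduction are not shown */
    }
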
1593 .macro GHASH_LAST_8_AVX T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
1734 vpsrldq $4, \T2, \T1 # shift-R T1 1 DW
1748 vpxor \T1, \T2, \T2
1867 .macro GHASH_MUL_AVX2 GH HK T1 T2 T3 T4 T5
1869 vpclmulqdq $0x11,\HK,\GH,\T1 # T1 = a1*b1
1879 vpxor \T3, \T1, \T1
1900 vpxor \T1, \GH, \GH # the result is in GH
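
Where the AVX macros above use three multiplies plus the half-swap trick, the AVX2 macros in the rest of this listing issue four VPCLMULQDQs per block ($0x11, $0x00, $0x01, $0x10), trading an extra multiply for fewer shuffles and XORs. A sketch of that four-multiply ("schoolbook") form, for comparison with the Karatsuba sketch above; the reduction is again not shown:

    /* Hedged sketch of the 4-multiply form behind the $0x11/$0x00/$0x01/$0x10
     * selectors; reduction not shown. */
    #include <immintrin.h>

    static void clmul_schoolbook_256(__m128i a, __m128i b,
                                     __m128i *hi, __m128i *lo)
    {
        __m128i h   = _mm_clmulepi64_si128(a, b, 0x11);   /* a1*b1 */
        __m128i l   = _mm_clmulepi64_si128(a, b, 0x00);   /* a0*b0 */
        __m128i m1  = _mm_clmulepi64_si128(a, b, 0x01);   /* a1*b0 */
        __m128i m2  = _mm_clmulepi64_si128(a, b, 0x10);   /* a0*b1 */
        __m128i mid = _mm_xor_si128(m1, m2);              /* middle 128 bits */

        *lo = _mm_xor_si128(l, _mm_slli_si128(mid, 8));
        *hi = _mm_xor_si128(h, _mm_srli_si128(mid, 8));
    }
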
1905 .macro PRECOMPUTE_AVX2 HK T1 T2 T3 T4 T5 T6
1909 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
1912 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
1915 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
1918 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
1921 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
1924 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
1927 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
1939 .macro INITIAL_BLOCKS_AVX2 REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 …
1995 vmovdqu (arg4, %r11), \T1
1996 vpxor \T1, reg_i, reg_i
2001 vmovdqa \T1, reg_i
2015 …GHASH_MUL_AVX2 reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks bloc…
2099 vmovdqu (arg4, %r11), \T1
2100 vpxor \T1, \XMM1, \XMM1
2103 vmovdqa \T1, \XMM1
2106 vmovdqu 16*1(arg4, %r11), \T1
2107 vpxor \T1, \XMM2, \XMM2
2110 vmovdqa \T1, \XMM2
2113 vmovdqu 16*2(arg4, %r11), \T1
2114 vpxor \T1, \XMM3, \XMM3
2117 vmovdqa \T1, \XMM3
2120 vmovdqu 16*3(arg4, %r11), \T1
2121 vpxor \T1, \XMM4, \XMM4
2124 vmovdqa \T1, \XMM4
2127 vmovdqu 16*4(arg4, %r11), \T1
2128 vpxor \T1, \XMM5, \XMM5
2131 vmovdqa \T1, \XMM5
2134 vmovdqu 16*5(arg4, %r11), \T1
2135 vpxor \T1, \XMM6, \XMM6
2138 vmovdqa \T1, \XMM6
2141 vmovdqu 16*6(arg4, %r11), \T1
2142 vpxor \T1, \XMM7, \XMM7
2145 vmovdqa \T1, \XMM7
2148 vmovdqu 16*7(arg4, %r11), \T1
2149 vpxor \T1, \XMM8, \XMM8
2152 vmovdqa \T1, \XMM8
2181 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX2 REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7…
2226 vmovdqu (arg1), \T1
2227 vpxor \T1, \XMM1, \XMM1
2228 vpxor \T1, \XMM2, \XMM2
2229 vpxor \T1, \XMM3, \XMM3
2230 vpxor \T1, \XMM4, \XMM4
2231 vpxor \T1, \XMM5, \XMM5
2232 vpxor \T1, \XMM6, \XMM6
2233 vpxor \T1, \XMM7, \XMM7
2234 vpxor \T1, \XMM8, \XMM8
2242 vmovdqu 16*1(arg1), \T1
2243 vaesenc \T1, \XMM1, \XMM1
2244 vaesenc \T1, \XMM2, \XMM2
2245 vaesenc \T1, \XMM3, \XMM3
2246 vaesenc \T1, \XMM4, \XMM4
2247 vaesenc \T1, \XMM5, \XMM5
2248 vaesenc \T1, \XMM6, \XMM6
2249 vaesenc \T1, \XMM7, \XMM7
2250 vaesenc \T1, \XMM8, \XMM8
2252 vmovdqu 16*2(arg1), \T1
2253 vaesenc \T1, \XMM1, \XMM1
2254 vaesenc \T1, \XMM2, \XMM2
2255 vaesenc \T1, \XMM3, \XMM3
2256 vaesenc \T1, \XMM4, \XMM4
2257 vaesenc \T1, \XMM5, \XMM5
2258 vaesenc \T1, \XMM6, \XMM6
2259 vaesenc \T1, \XMM7, \XMM7
2260 vaesenc \T1, \XMM8, \XMM8
2272 vmovdqu 16*3(arg1), \T1
2273 vaesenc \T1, \XMM1, \XMM1
2274 vaesenc \T1, \XMM2, \XMM2
2275 vaesenc \T1, \XMM3, \XMM3
2276 vaesenc \T1, \XMM4, \XMM4
2277 vaesenc \T1, \XMM5, \XMM5
2278 vaesenc \T1, \XMM6, \XMM6
2279 vaesenc \T1, \XMM7, \XMM7
2280 vaesenc \T1, \XMM8, \XMM8
2282 vmovdqa TMP2(%rsp), \T1
2284 vpclmulqdq $0x11, \T5, \T1, \T3
2287 vpclmulqdq $0x00, \T5, \T1, \T3
2290 vpclmulqdq $0x01, \T5, \T1, \T3
2293 vpclmulqdq $0x10, \T5, \T1, \T3
2296 vmovdqu 16*4(arg1), \T1
2297 vaesenc \T1, \XMM1, \XMM1
2298 vaesenc \T1, \XMM2, \XMM2
2299 vaesenc \T1, \XMM3, \XMM3
2300 vaesenc \T1, \XMM4, \XMM4
2301 vaesenc \T1, \XMM5, \XMM5
2302 vaesenc \T1, \XMM6, \XMM6
2303 vaesenc \T1, \XMM7, \XMM7
2304 vaesenc \T1, \XMM8, \XMM8
2308 vmovdqa TMP3(%rsp), \T1
2310 vpclmulqdq $0x11, \T5, \T1, \T3
2313 vpclmulqdq $0x00, \T5, \T1, \T3
2316 vpclmulqdq $0x01, \T5, \T1, \T3
2319 vpclmulqdq $0x10, \T5, \T1, \T3
2322 vmovdqu 16*5(arg1), \T1
2323 vaesenc \T1, \XMM1, \XMM1
2324 vaesenc \T1, \XMM2, \XMM2
2325 vaesenc \T1, \XMM3, \XMM3
2326 vaesenc \T1, \XMM4, \XMM4
2327 vaesenc \T1, \XMM5, \XMM5
2328 vaesenc \T1, \XMM6, \XMM6
2329 vaesenc \T1, \XMM7, \XMM7
2330 vaesenc \T1, \XMM8, \XMM8
2332 vmovdqa TMP4(%rsp), \T1
2334 vpclmulqdq $0x11, \T5, \T1, \T3
2337 vpclmulqdq $0x00, \T5, \T1, \T3
2340 vpclmulqdq $0x01, \T5, \T1, \T3
2343 vpclmulqdq $0x10, \T5, \T1, \T3
2346 vmovdqu 16*6(arg1), \T1
2347 vaesenc \T1, \XMM1, \XMM1
2348 vaesenc \T1, \XMM2, \XMM2
2349 vaesenc \T1, \XMM3, \XMM3
2350 vaesenc \T1, \XMM4, \XMM4
2351 vaesenc \T1, \XMM5, \XMM5
2352 vaesenc \T1, \XMM6, \XMM6
2353 vaesenc \T1, \XMM7, \XMM7
2354 vaesenc \T1, \XMM8, \XMM8
2357 vmovdqa TMP5(%rsp), \T1
2359 vpclmulqdq $0x11, \T5, \T1, \T3
2362 vpclmulqdq $0x00, \T5, \T1, \T3
2365 vpclmulqdq $0x01, \T5, \T1, \T3
2368 vpclmulqdq $0x10, \T5, \T1, \T3
2371 vmovdqu 16*7(arg1), \T1
2372 vaesenc \T1, \XMM1, \XMM1
2373 vaesenc \T1, \XMM2, \XMM2
2374 vaesenc \T1, \XMM3, \XMM3
2375 vaesenc \T1, \XMM4, \XMM4
2376 vaesenc \T1, \XMM5, \XMM5
2377 vaesenc \T1, \XMM6, \XMM6
2378 vaesenc \T1, \XMM7, \XMM7
2379 vaesenc \T1, \XMM8, \XMM8
2381 vmovdqa TMP6(%rsp), \T1
2383 vpclmulqdq $0x11, \T5, \T1, \T3
2386 vpclmulqdq $0x00, \T5, \T1, \T3
2389 vpclmulqdq $0x01, \T5, \T1, \T3
2392 vpclmulqdq $0x10, \T5, \T1, \T3
2395 vmovdqu 16*8(arg1), \T1
2396 vaesenc \T1, \XMM1, \XMM1
2397 vaesenc \T1, \XMM2, \XMM2
2398 vaesenc \T1, \XMM3, \XMM3
2399 vaesenc \T1, \XMM4, \XMM4
2400 vaesenc \T1, \XMM5, \XMM5
2401 vaesenc \T1, \XMM6, \XMM6
2402 vaesenc \T1, \XMM7, \XMM7
2403 vaesenc \T1, \XMM8, \XMM8
2405 vmovdqa TMP7(%rsp), \T1
2407 vpclmulqdq $0x11, \T5, \T1, \T3
2410 vpclmulqdq $0x00, \T5, \T1, \T3
2413 vpclmulqdq $0x01, \T5, \T1, \T3
2416 vpclmulqdq $0x10, \T5, \T1, \T3
2432 vmovdqa TMP8(%rsp), \T1
2435 vpclmulqdq $0x00, \T5, \T1, \T3
2438 vpclmulqdq $0x01, \T5, \T1, \T3
2441 vpclmulqdq $0x10, \T5, \T1, \T3
2444 vpclmulqdq $0x11, \T5, \T1, \T3
2445 vpxor \T3, \T4, \T1
2489 vpxor \T6, \T1, \T1 # accumulate the results in T1:T7
2523 vpxor \T4, \T1, \T1 # the result is in T1
2535 vpxor \T1, \XMM1, \XMM1
2543 .macro GHASH_LAST_8_AVX2 T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8