Lines Matching full:t3
595 .macro CALC_AAD_HASH GHASH_MUL AAD AADLEN T1 T2 T3 T4 T5 T6 T7 T8
611 \GHASH_MUL \T8, \T2, \T1, \T3, \T4, \T5, \T6
657 \GHASH_MUL \T7, \T2, \T1, \T3, \T4, \T5, \T6
885 .macro GHASH_MUL_AVX GH HK T1 T2 T3 T4 T5
888 vpshufd $0b01001110, \HK, \T3
890 vpxor \HK , \T3, \T3 # T3 = (b1+b0)
894 vpclmulqdq $0x00, \T3, \T2, \T2 # T2 = (a1+a0)*(b1+b0)
898 vpslldq $8, \T2,\T3 # T3 = T2 shifted left 2 DWs
900 vpxor \T3, \GH, \GH
905 vpslld $30, \GH, \T3 # packed left shift of each dword by 30
908 vpxor \T3, \T2, \T2 # xor the shifted versions
919 vpsrld $2,\GH, \T3 # packed right shift of each dword by 2
921 vpxor \T3, \T2, \T2 # xor the shifted versions
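The GHASH_MUL_AVX lines above use T2/T3 for the classic three-multiplication Karatsuba carry-less multiply before the shift-based reduction. As a point of reference, here is a minimal stand-alone sketch of just that multiply step; it is not part of the kernel file, and the symbol name, user-space SysV ABI and operand layout (a at rdi, b at rsi, 32-byte product written through rdx) are assumptions for illustration only.

# Hypothetical sketch, not kernel code: 3-multiply Karatsuba carry-less
# 128x128 -> 256-bit product, the same pattern GHASH_MUL_AVX builds in
# \T1/\T2/\T3 before reducing mod the GHASH polynomial.
	.text
	.globl	clmul256_karatsuba_sketch
clmul256_karatsuba_sketch:
	vmovdqu	(%rdi), %xmm0			# a = a1:a0
	vmovdqu	(%rsi), %xmm1			# b = b1:b0
	vpclmulqdq $0x11, %xmm1, %xmm0, %xmm2	# a1*b1
	vpclmulqdq $0x00, %xmm1, %xmm0, %xmm3	# a0*b0
	vpshufd	$0b01001110, %xmm0, %xmm4	# swap qwords of a
	vpshufd	$0b01001110, %xmm1, %xmm5	# swap qwords of b
	vpxor	%xmm0, %xmm4, %xmm4		# (a1+a0)
	vpxor	%xmm1, %xmm5, %xmm5		# (b1+b0)
	vpclmulqdq $0x00, %xmm5, %xmm4, %xmm4	# (a1+a0)*(b1+b0)
	vpxor	%xmm2, %xmm4, %xmm4		# fold out a1*b1 (xor = GF(2) subtract)
	vpxor	%xmm3, %xmm4, %xmm4		# fold out a0*b0 -> a1*b0 + a0*b1
	vpslldq	$8, %xmm4, %xmm5		# low half of middle term, shifted up
	vpsrldq	$8, %xmm4, %xmm4		# high half of middle term, shifted down
	vpxor	%xmm5, %xmm3, %xmm3		# low 128 bits of the product
	vpxor	%xmm4, %xmm2, %xmm2		# high 128 bits of the product
	vmovdqu	%xmm3, (%rdx)
	vmovdqu	%xmm2, 16(%rdx)
	ret

In the kernel macro the 256-bit product is then reduced back to 128 bits with the dword-shift sequence shown above, so the running hash in GH stays one xmm register wide.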
931 .macro PRECOMPUTE_AVX HK T1 T2 T3 T4 T5 T6
940 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
946 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
952 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
958 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
964 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
970 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
976 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
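PRECOMPUTE_AVX obtains HashKey^2 through HashKey^8 by repeatedly feeding the running power back through GHASH_MUL_AVX, as the repeated calls above show. A rough illustrative equivalent follows; it is not the kernel's PRECOMPUTE_AVX, and ghash_mul_sketch(rdi = accumulator in/out, rsi = H) is an assumed, hypothetical helper that performs one multiply-and-reduce.

# Hypothetical sketch: fill an 8-entry table with H^1..H^8 for 8-way
# aggregation; slot 0 must already hold H on entry.  SysV ABI assumed,
# rdi -> 8*16-byte table; ghash_mul_sketch is an assumed external helper.
	.text
	.globl	ghash_key_powers_sketch
ghash_key_powers_sketch:
	push	%rbx
	push	%r12
	push	%rbp
	mov	%rdi, %rbx		# table base (slot 0 = H)
	lea	16(%rdi), %r12		# slot being filled, H^2 first
	mov	$7, %ebp		# seven more powers: H^2 .. H^8
1:
	vmovdqu	-16(%r12), %xmm0	# previous power H^(i-1)
	vmovdqu	%xmm0, (%r12)		# seed the current slot with it
	mov	%r12, %rdi		# accumulator = current slot
	mov	%rbx, %rsi		# multiply by H
	call	ghash_mul_sketch	# assumed: acc = acc * H mod g
	add	$16, %r12
	dec	%ebp
	jnz	1b
	pop	%rbp
	pop	%r12
	pop	%rbx
	ret

The kernel macro additionally stores, for each power, the xor of its two qword halves (the HashKey_i_k values loaded by GHASH_LAST_8_AVX below), so the key-side (b1+b0) operand of the Karatsuba middle term does not have to be recomputed per block.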
991 .macro INITIAL_BLOCKS_AVX REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 X…
1065 … GHASH_MUL_AVX reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks blocks
1073 vmovdqa \XMM8, \T3
1226 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 …
1332 vpclmulqdq $0x11, \T5, \T1, \T3
1333 vpxor \T3, \T4, \T4
1334 vpclmulqdq $0x00, \T5, \T1, \T3
1335 vpxor \T3, \T7, \T7
1337 vpshufd $0b01001110, \T1, \T3
1338 vpxor \T1, \T3, \T3
1340 vpclmulqdq $0x10, \T5, \T3, \T3
1341 vpxor \T3, \T6, \T6
1357 vpclmulqdq $0x11, \T5, \T1, \T3
1358 vpxor \T3, \T4, \T4
1359 vpclmulqdq $0x00, \T5, \T1, \T3
1360 vpxor \T3, \T7, \T7
1362 vpshufd $0b01001110, \T1, \T3
1363 vpxor \T1, \T3, \T3
1365 vpclmulqdq $0x10, \T5, \T3, \T3
1366 vpxor \T3, \T6, \T6
1380 vpclmulqdq $0x11, \T5, \T1, \T3
1381 vpxor \T3, \T4, \T4
1382 vpclmulqdq $0x00, \T5, \T1, \T3
1383 vpxor \T3, \T7, \T7
1385 vpshufd $0b01001110, \T1, \T3
1386 vpxor \T1, \T3, \T3
1388 vpclmulqdq $0x10, \T5, \T3, \T3
1389 vpxor \T3, \T6, \T6
1404 vpclmulqdq $0x11, \T5, \T1, \T3
1405 vpxor \T3, \T4, \T4
1406 vpclmulqdq $0x00, \T5, \T1, \T3
1407 vpxor \T3, \T7, \T7
1409 vpshufd $0b01001110, \T1, \T3
1410 vpxor \T1, \T3, \T3
1412 vpclmulqdq $0x10, \T5, \T3, \T3
1413 vpxor \T3, \T6, \T6
1427 vpclmulqdq $0x11, \T5, \T1, \T3
1428 vpxor \T3, \T4, \T4
1429 vpclmulqdq $0x00, \T5, \T1, \T3
1430 vpxor \T3, \T7, \T7
1432 vpshufd $0b01001110, \T1, \T3
1433 vpxor \T1, \T3, \T3
1435 vpclmulqdq $0x10, \T5, \T3, \T3
1436 vpxor \T3, \T6, \T6
1451 vpclmulqdq $0x11, \T5, \T1, \T3
1452 vpxor \T3, \T4, \T4
1453 vpclmulqdq $0x00, \T5, \T1, \T3
1454 vpxor \T3, \T7, \T7
1456 vpshufd $0b01001110, \T1, \T3
1457 vpxor \T1, \T3, \T3
1459 vpclmulqdq $0x10, \T5, \T3, \T3
1460 vpxor \T3, \T6, \T6
1476 vpclmulqdq $0x11, \T5, \T1, \T3
1477 vpxor \T3, \T4, \T4
1478 vpclmulqdq $0x00, \T5, \T1, \T3
1479 vpxor \T3, \T7, \T7
1481 vpshufd $0b01001110, \T1, \T3
1482 vpxor \T1, \T3, \T3
1484 vpclmulqdq $0x10, \T5, \T3, \T3
1485 vpxor \T3, \T6, \T6
1518 vaesenclast \T2, reg_j, \T3
1520 vmovdqu \T3, 16*i(arg3, %r11)
1529 vpslldq $8, \T6, \T3 # T3 = T6 shifted left 2 DWs
1531 vpxor \T3, \T7, \T7
1540 vpslld $30, \T7, \T3 # packed left shift of each dword by 30
1543 vpxor \T3, \T2, \T2 # xor the shifted versions
1565 vpsrld $2, \T7, \T3 # packed right shift of each dword by 2
1567 vpxor \T3, \T2, \T2 # xor the shifted versions
1593 .macro GHASH_LAST_8_AVX T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
1604 vmovdqu HashKey_8_k(arg2), \T3
1605 vpclmulqdq $0x00, \T3, \T2, \XMM1
1618 vmovdqu HashKey_7_k(arg2), \T3
1619 vpclmulqdq $0x00, \T3, \T2, \T2
1633 vmovdqu HashKey_6_k(arg2), \T3
1634 vpclmulqdq $0x00, \T3, \T2, \T2
1648 vmovdqu HashKey_5_k(arg2), \T3
1649 vpclmulqdq $0x00, \T3, \T2, \T2
1663 vmovdqu HashKey_4_k(arg2), \T3
1664 vpclmulqdq $0x00, \T3, \T2, \T2
1678 vmovdqu HashKey_3_k(arg2), \T3
1679 vpclmulqdq $0x00, \T3, \T2, \T2
1693 vmovdqu HashKey_2_k(arg2), \T3
1694 vpclmulqdq $0x00, \T3, \T2, \T2
1708 vmovdqu HashKey_k(arg2), \T3
1709 vpclmulqdq $0x00, \T3, \T2, \T2
1728 vpslld $30, \T7, \T3 # packed left shift of each dword by 30
1731 vpxor \T3, \T2, \T2 # xor the shifted versions
1743 vpsrld $2, \T7, \T3 # packed right shift of each dword by 2
1745 vpxor \T3, \T2, \T2 # xor the shifted versions
1867 .macro GHASH_MUL_AVX2 GH HK T1 T2 T3 T4 T5
1871 vpclmulqdq $0x01,\HK,\GH,\T3 # T3 = a1*b0
1873 vpxor \T3, \GH, \GH
1876 vpsrldq $8 , \GH, \T3 # shift-R GH 2 DWs
1879 vpxor \T3, \T1, \T1
1884 vmovdqa POLY2(%rip), \T3
1886 vpclmulqdq $0x01, \GH, \T3, \T2
1892 vpclmulqdq $0x00, \GH, \T3, \T2
1895 vpclmulqdq $0x10, \GH, \T3, \GH
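GHASH_MUL_AVX2 above drops Karatsuba in favor of a plain four-multiplication schoolbook product (imm 0x11/0x00/0x01/0x10 selecting a1*b1, a0*b0, a1*b0 and a0*b1), and the same immediate pattern repeats throughout GHASH_8_ENCRYPT_8_PARALLEL_AVX2 below. A minimal stand-alone sketch of that multiply, under the same assumptions as the earlier sketch (SysV ABI, hypothetical symbol name, not kernel code):

# Hypothetical sketch: 4-multiply schoolbook carry-less 128x128 -> 256-bit
# product.  rdi -> a, rsi -> b, rdx -> 32-byte product (low half first).
	.text
	.globl	clmul256_schoolbook_sketch
clmul256_schoolbook_sketch:
	vmovdqu	(%rdi), %xmm0			# a = a1:a0
	vmovdqu	(%rsi), %xmm1			# b = b1:b0
	vpclmulqdq $0x11, %xmm1, %xmm0, %xmm2	# a1*b1
	vpclmulqdq $0x00, %xmm1, %xmm0, %xmm3	# a0*b0
	vpclmulqdq $0x01, %xmm1, %xmm0, %xmm4	# a1*b0
	vpclmulqdq $0x10, %xmm1, %xmm0, %xmm5	# a0*b1
	vpxor	%xmm5, %xmm4, %xmm4		# middle term a1*b0 + a0*b1
	vpslldq	$8, %xmm4, %xmm5		# its low 64 bits, shifted up
	vpsrldq	$8, %xmm4, %xmm4		# its high 64 bits, shifted down
	vpxor	%xmm5, %xmm3, %xmm3		# low 128 bits of the product
	vpxor	%xmm4, %xmm2, %xmm2		# high 128 bits of the product
	vmovdqu	%xmm3, (%rdx)
	vmovdqu	%xmm2, 16(%rdx)
	ret

After the multiply, the AVX2 macro reduces with two further vpclmulqdq against the POLY2 constant (the $0x01/$0x00/$0x10 uses of \T3 loaded from POLY2 above) instead of the dword-shift sequence used by the AVX version.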
1905 .macro PRECOMPUTE_AVX2 HK T1 T2 T3 T4 T5 T6
1909 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
1912 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
1915 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
1918 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
1921 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
1924 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
1927 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
1939 .macro INITIAL_BLOCKS_AVX2 REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 …
2015 …GHASH_MUL_AVX2 reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks bloc…
2023 vmovdqa \XMM8, \T3
2181 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX2 REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7…
2284 vpclmulqdq $0x11, \T5, \T1, \T3
2285 vpxor \T3, \T4, \T4
2287 vpclmulqdq $0x00, \T5, \T1, \T3
2288 vpxor \T3, \T7, \T7
2290 vpclmulqdq $0x01, \T5, \T1, \T3
2291 vpxor \T3, \T6, \T6
2293 vpclmulqdq $0x10, \T5, \T1, \T3
2294 vpxor \T3, \T6, \T6
2310 vpclmulqdq $0x11, \T5, \T1, \T3
2311 vpxor \T3, \T4, \T4
2313 vpclmulqdq $0x00, \T5, \T1, \T3
2314 vpxor \T3, \T7, \T7
2316 vpclmulqdq $0x01, \T5, \T1, \T3
2317 vpxor \T3, \T6, \T6
2319 vpclmulqdq $0x10, \T5, \T1, \T3
2320 vpxor \T3, \T6, \T6
2334 vpclmulqdq $0x11, \T5, \T1, \T3
2335 vpxor \T3, \T4, \T4
2337 vpclmulqdq $0x00, \T5, \T1, \T3
2338 vpxor \T3, \T7, \T7
2340 vpclmulqdq $0x01, \T5, \T1, \T3
2341 vpxor \T3, \T6, \T6
2343 vpclmulqdq $0x10, \T5, \T1, \T3
2344 vpxor \T3, \T6, \T6
2359 vpclmulqdq $0x11, \T5, \T1, \T3
2360 vpxor \T3, \T4, \T4
2362 vpclmulqdq $0x00, \T5, \T1, \T3
2363 vpxor \T3, \T7, \T7
2365 vpclmulqdq $0x01, \T5, \T1, \T3
2366 vpxor \T3, \T6, \T6
2368 vpclmulqdq $0x10, \T5, \T1, \T3
2369 vpxor \T3, \T6, \T6
2383 vpclmulqdq $0x11, \T5, \T1, \T3
2384 vpxor \T3, \T4, \T4
2386 vpclmulqdq $0x00, \T5, \T1, \T3
2387 vpxor \T3, \T7, \T7
2389 vpclmulqdq $0x01, \T5, \T1, \T3
2390 vpxor \T3, \T6, \T6
2392 vpclmulqdq $0x10, \T5, \T1, \T3
2393 vpxor \T3, \T6, \T6
2407 vpclmulqdq $0x11, \T5, \T1, \T3
2408 vpxor \T3, \T4, \T4
2410 vpclmulqdq $0x00, \T5, \T1, \T3
2411 vpxor \T3, \T7, \T7
2413 vpclmulqdq $0x01, \T5, \T1, \T3
2414 vpxor \T3, \T6, \T6
2416 vpclmulqdq $0x10, \T5, \T1, \T3
2417 vpxor \T3, \T6, \T6
2435 vpclmulqdq $0x00, \T5, \T1, \T3
2436 vpxor \T3, \T7, \T7
2438 vpclmulqdq $0x01, \T5, \T1, \T3
2439 vpxor \T3, \T6, \T6
2441 vpclmulqdq $0x10, \T5, \T1, \T3
2442 vpxor \T3, \T6, \T6
2444 vpclmulqdq $0x11, \T5, \T1, \T3
2445 vpxor \T3, \T4, \T1
2475 vaesenclast \T2, reg_j, \T3
2477 vmovdqu \T3, 16*i(arg3, %r11)
2486 vpslldq $8, \T6, \T3 # T3 = T6 shifted left 2 DWs
2488 vpxor \T3, \T7, \T7
2495 vmovdqa POLY2(%rip), \T3
2497 vpclmulqdq $0x01, \T7, \T3, \T2
2515 vpclmulqdq $0x00, \T7, \T3, \T2
2518 vpclmulqdq $0x10, \T7, \T3, \T4
2543 .macro GHASH_LAST_8_AVX2 T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
2550 vpshufd $0b01001110, \T5, \T3
2552 vpxor \T5, \T3, \T3
2557 vpclmulqdq $0x00, \T3, \T2, \XMM1
2563 vpshufd $0b01001110, \T5, \T3
2565 vpxor \T5, \T3, \T3
2573 vpclmulqdq $0x00, \T3, \T2, \T2
2581 vpshufd $0b01001110, \T5, \T3
2583 vpxor \T5, \T3, \T3
2591 vpclmulqdq $0x00, \T3, \T2, \T2
2599 vpshufd $0b01001110, \T5, \T3
2601 vpxor \T5, \T3, \T3
2609 vpclmulqdq $0x00, \T3, \T2, \T2
2617 vpshufd $0b01001110, \T5, \T3
2619 vpxor \T5, \T3, \T3
2627 vpclmulqdq $0x00, \T3, \T2, \T2
2635 vpshufd $0b01001110, \T5, \T3
2637 vpxor \T5, \T3, \T3
2645 vpclmulqdq $0x00, \T3, \T2, \T2
2653 vpshufd $0b01001110, \T5, \T3
2655 vpxor \T5, \T3, \T3
2663 vpclmulqdq $0x00, \T3, \T2, \T2
2671 vpshufd $0b01001110, \T5, \T3
2673 vpxor \T5, \T3, \T3
2681 vpclmulqdq $0x00, \T3, \T2, \T2
2699 vmovdqa POLY2(%rip), \T3
2701 vpclmulqdq $0x01, \T7, \T3, \T2
2709 vpclmulqdq $0x00, \T7, \T3, \T2
2712 vpclmulqdq $0x10, \T7, \T3, \T4