Lines Matching refs:T3
291 .macro GHASH_MUL_AVX GH HK T1 T2 T3 T4 T5
294 vpshufd $0b01001110, \HK, \T3
296 vpxor \HK, \T3, \T3 # T3 = (b1+b0)
300 vpclmulqdq $0x00, \T3, \T2, \T2 # T2 = (a1+a0)*(b1+b0)
304 vpslldq $8, \T2, \T3 # shift-L T3 2 DWs
306 vpxor \T3, \GH, \GH
311 vpslld $30, \GH, \T3 # packed left shifting << 30
314 vpxor \T3, \T2, \T2 # xor the shifted versions
325 vpsrld $2, \GH, \T3 # packed right shifting >> 2
327 vpxor \T3, \T2, \T2 # xor the shifted versions
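The macro above multiplies GH by HK in GF(2^128): three PCLMULQDQs form the Karatsuba product (a1*b1, a0*b0, and (a1+a0)*(b1+b0)), and the shift/xor sequence that follows reduces the 256-bit result modulo the GCM polynomial. As a ground-truth reference for the operation, here is the bitwise multiply from NIST SP 800-38D in C; a minimal sketch of the mathematics only, not of the kernel's bit-reflected, shifted-key data layout:

#include <stdint.h>
#include <string.h>

// Reference GF(2^128) multiply per NIST SP 800-38D (GCM bit convention:
// x^0 is the most-significant bit of byte 0). It computes the same field
// product that GHASH_MUL_AVX computes with Karatsuba PCLMULQDQ plus the
// shift/xor reduction.
static void gf128_mul(const uint8_t X[16], const uint8_t Y[16], uint8_t Z[16])
{
    uint8_t V[16], acc[16] = {0};

    memcpy(V, Y, 16);
    for (int i = 0; i < 128; i++) {
        if (X[i / 8] & (0x80u >> (i % 8)))        // if bit i of X is set...
            for (int j = 0; j < 16; j++)
                acc[j] ^= V[j];                   // ...accumulate V into the result
        int lsb = V[15] & 1;                      // V := V * x (a right shift here)
        for (int j = 15; j > 0; j--)
            V[j] = (uint8_t)((V[j] >> 1) | (V[j - 1] << 7));
        V[0] >>= 1;
        if (lsb)
            V[0] ^= 0xe1;                         // reduce by x^128 + x^7 + x^2 + x + 1
    }
    memcpy(Z, acc, 16);
}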
337 .macro PRECOMPUTE_AVX HK T1 T2 T3 T4 T5 T6
346 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
352 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
358 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
364 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
370 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
376 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
382 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
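PRECOMPUTE_AVX fills the HashKey table by repeated GHASH multiplication: each call above produces the next power of H. The same computation with the reference multiply from the previous sketch (ignoring the <<1 mod poly key representation the assembly maintains):

// Precompute hpow[0..7] = H^1..H^8, mirroring PRECOMPUTE_AVX's chain of
// GHASH_MUL_AVX calls. gf128_mul() is the reference multiply above.
static void ghash_powers(const uint8_t H[16], uint8_t hpow[8][16])
{
    memcpy(hpow[0], H, 16);                      // H^1
    for (int i = 1; i < 8; i++)
        gf128_mul(hpow[i - 1], H, hpow[i]);      // H^(i+1) = H^i * H
}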
397 .macro INITIAL_BLOCKS_AVX num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 …
416 GHASH_MUL_AVX reg_j, \T2, \T1, \T3, \T4, \T5, \T6
462 GHASH_MUL_AVX reg_i, \T2, \T1, \T3, \T4, \T5, \T6
541 … GHASH_MUL_AVX reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks blocks
549 vmovdqa \XMM8, \T3
703 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8…
809 vpclmulqdq $0x11, \T5, \T1, \T3
810 vpxor \T3, \T4, \T4
811 vpclmulqdq $0x00, \T5, \T1, \T3
812 vpxor \T3, \T7, \T7
814 vpshufd $0b01001110, \T1, \T3
815 vpxor \T1, \T3, \T3
817 vpclmulqdq $0x10, \T5, \T3, \T3
818 vpxor \T3, \T6, \T6
834 vpclmulqdq $0x11, \T5, \T1, \T3
835 vpxor \T3, \T4, \T4
836 vpclmulqdq $0x00, \T5, \T1, \T3
837 vpxor \T3, \T7, \T7
839 vpshufd $0b01001110, \T1, \T3
840 vpxor \T1, \T3, \T3
842 vpclmulqdq $0x10, \T5, \T3, \T3
843 vpxor \T3, \T6, \T6
857 vpclmulqdq $0x11, \T5, \T1, \T3
858 vpxor \T3, \T4, \T4
859 vpclmulqdq $0x00, \T5, \T1, \T3
860 vpxor \T3, \T7, \T7
862 vpshufd $0b01001110, \T1, \T3
863 vpxor \T1, \T3, \T3
865 vpclmulqdq $0x10, \T5, \T3, \T3
866 vpxor \T3, \T6, \T6
881 vpclmulqdq $0x11, \T5, \T1, \T3
882 vpxor \T3, \T4, \T4
883 vpclmulqdq $0x00, \T5, \T1, \T3
884 vpxor \T3, \T7, \T7
886 vpshufd $0b01001110, \T1, \T3
887 vpxor \T1, \T3, \T3
889 vpclmulqdq $0x10, \T5, \T3, \T3
890 vpxor \T3, \T6, \T6
904 vpclmulqdq $0x11, \T5, \T1, \T3
905 vpxor \T3, \T4, \T4
906 vpclmulqdq $0x00, \T5, \T1, \T3
907 vpxor \T3, \T7, \T7
909 vpshufd $0b01001110, \T1, \T3
910 vpxor \T1, \T3, \T3
912 vpclmulqdq $0x10, \T5, \T3, \T3
913 vpxor \T3, \T6, \T6
928 vpclmulqdq $0x11, \T5, \T1, \T3
929 vpxor \T3, \T4, \T4
930 vpclmulqdq $0x00, \T5, \T1, \T3
931 vpxor \T3, \T7, \T7
933 vpshufd $0b01001110, \T1, \T3
934 vpxor \T1, \T3, \T3
936 vpclmulqdq $0x10, \T5, \T3, \T3
937 vpxor \T3, \T6, \T6
953 vpclmulqdq $0x11, \T5, \T1, \T3
954 vpxor \T3, \T4, \T4
955 vpclmulqdq $0x00, \T5, \T1, \T3
956 vpxor \T3, \T7, \T7
958 vpshufd $0b01001110, \T1, \T3
959 vpxor \T1, \T3, \T3
961 vpclmulqdq $0x10, \T5, \T3, \T3
962 vpxor \T3, \T6, \T6
977 vaesenclast \T2, reg_j, \T3
979 vmovdqu \T3, 16*i(arg2, %r11)
988 vpslldq $8, \T6, \T3 # shift-L T3 2 DWs
990 vpxor \T3, \T7, \T7
999 vpslld $30, \T7, \T3 # packed left shifting << 30
1002 vpxor \T3, \T2, \T2 # xor the shifted versions
1024 vpsrld $2, \T7, \T3 # packed right shifting >> 2
1026 vpxor \T3, \T2, \T2 # xor the shifted versions
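Each of the eight clmul/xor groups above accumulates one block's high, low, and middle partial products into \T4, \T7 and \T6, so the reduction just shown runs once per eight blocks rather than once per block. The identity this relies on, expressed with the reference sketches above (hpow[i] holding H^(i+1)):

// Aggregated GHASH over 8 blocks, the form GHASH_8_ENCRYPT_8_PARALLEL_AVX
// accumulates:
//   Y' = (Y ^ X1)*H^8 ^ X2*H^7 ^ ... ^ X8*H
// which equals the serial Horner form ((((Y ^ X1)*H) ^ X2)*H ... ^ X8)*H.
static void ghash_8_aggregated(uint8_t Y[16], uint8_t X[8][16],
                               uint8_t hpow[8][16])
{
    uint8_t acc[16] = {0}, t[16], blk[16];

    for (int i = 0; i < 8; i++) {
        memcpy(blk, X[i], 16);
        if (i == 0)                              // fold the running hash into block 1
            for (int j = 0; j < 16; j++)
                blk[j] ^= Y[j];
        gf128_mul(blk, hpow[7 - i], t);          // X_(i+1) * H^(8-i)
        for (int j = 0; j < 16; j++)
            acc[j] ^= t[j];
    }
    memcpy(Y, acc, 16);
}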
1052 .macro GHASH_LAST_8_AVX T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
1063 vmovdqa HashKey_8_k(arg1), \T3
1064 vpclmulqdq $0x00, \T3, \T2, \XMM1
1077 vmovdqa HashKey_7_k(arg1), \T3
1078 vpclmulqdq $0x00, \T3, \T2, \T2
1092 vmovdqa HashKey_6_k(arg1), \T3
1093 vpclmulqdq $0x00, \T3, \T2, \T2
1107 vmovdqa HashKey_5_k(arg1), \T3
1108 vpclmulqdq $0x00, \T3, \T2, \T2
1122 vmovdqa HashKey_4_k(arg1), \T3
1123 vpclmulqdq $0x00, \T3, \T2, \T2
1137 vmovdqa HashKey_3_k(arg1), \T3
1138 vpclmulqdq $0x00, \T3, \T2, \T2
1152 vmovdqa HashKey_2_k(arg1), \T3
1153 vpclmulqdq $0x00, \T3, \T2, \T2
1167 vmovdqa HashKey_k(arg1), \T3
1168 vpclmulqdq $0x00, \T3, \T2, \T2
1187 vpslld $30, \T7, \T3 # packed left shifting << 30
1190 vpxor \T3, \T2, \T2 # xor the shifted versions
1204 vpsrld $2, \T7, \T3 # packed right shifting >> 2
1204 vpxor \T3, \T2, \T2 # xor the shifted versions
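GHASH_LAST_8_AVX never recomputes the key half of the Karatsuba middle term: the HashKey_N_k tables it loads appear to hold the precomputed xor of each power's two qword halves. A hedged intrinsics sketch of one three-multiply step under that layout assumption (hk_k = hk_hi ^ hk_lo in its low qword):

#include <emmintrin.h>
#include <wmmintrin.h>   // _mm_clmulepi64_si128; compile with -mpclmul

// One Karatsuba step as in GHASH_LAST_8_AVX. 'hk_k' is assumed to carry
// (hk_hi ^ hk_lo), the apparent content of HashKey_N_k(arg1). The caller
// later folds mid ^= hi ^ lo and merges mid into hi:lo before reduction.
static void karatsuba_step(__m128i x, __m128i hk, __m128i hk_k,
                           __m128i *hi, __m128i *lo, __m128i *mid)
{
    *hi = _mm_clmulepi64_si128(x, hk, 0x11);                    // x1*h1
    *lo = _mm_clmulepi64_si128(x, hk, 0x00);                    // x0*h0
    __m128i xs = _mm_xor_si128(x, _mm_shuffle_epi32(x, 0x4e));  // x1^x0 (vpshufd $0b01001110)
    *mid = _mm_clmulepi64_si128(xs, hk_k, 0x00);                // (x1^x0)*(h1^h0)
}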
1630 .macro GHASH_MUL_AVX2 GH HK T1 T2 T3 T4 T5
1634 vpclmulqdq $0x01, \HK, \GH, \T3 # T3 = a1*b0
1636 vpxor \T3, \GH, \GH
1639 vpsrldq $8, \GH, \T3 # shift-R GH 2 DWs
1642 vpxor \T3, \T1, \T1
1647 vmovdqa POLY2(%rip), \T3
1649 vpclmulqdq $0x01, \GH, \T3, \T2
1655 vpclmulqdq $0x00, \GH, \T3, \T2
1658 vpclmulqdq $0x10, \GH, \T3, \GH
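GHASH_MUL_AVX2 drops Karatsuba in favor of all four cross products (imm 0x00, 0x01, 0x10, 0x11) and then reduces with two carry-less multiplies against the POLY2 constant rather than the AVX1 shift/xor chain. A sketch of the four-multiply schoolbook product with C intrinsics; the POLY2 reduction is omitted here since its constant does not appear in this listing:

#include <emmintrin.h>
#include <wmmintrin.h>   // _mm_clmulepi64_si128; compile with -mpclmul

// Schoolbook 128x128 -> 256-bit carry-less multiply with four PCLMULQDQs,
// the pattern GHASH_MUL_AVX2 uses. imm8 bit 0 selects the qword of the
// first operand, bit 4 the qword of the second.
static void clmul_256(__m128i a, __m128i b, __m128i *hi, __m128i *lo)
{
    __m128i p00 = _mm_clmulepi64_si128(a, b, 0x00);     // a0*b0
    __m128i p11 = _mm_clmulepi64_si128(a, b, 0x11);     // a1*b1
    __m128i p10 = _mm_clmulepi64_si128(a, b, 0x01);     // a1*b0
    __m128i p01 = _mm_clmulepi64_si128(a, b, 0x10);     // a0*b1
    __m128i mid = _mm_xor_si128(p10, p01);

    *lo = _mm_xor_si128(p00, _mm_slli_si128(mid, 8));   // mid's low qword -> bits 64..127
    *hi = _mm_xor_si128(p11, _mm_srli_si128(mid, 8));   // mid's high qword -> bits 128..191
}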
1668 .macro PRECOMPUTE_AVX2 HK T1 T2 T3 T4 T5 T6
1672 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
1675 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
1678 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
1681 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
1684 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
1687 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
1690 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
1703 .macro INITIAL_BLOCKS_AVX2 num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7…
1723 GHASH_MUL_AVX2 reg_j, \T2, \T1, \T3, \T4, \T5, \T6
1769 GHASH_MUL_AVX2 reg_i, \T2, \T1, \T3, \T4, \T5, \T6
1849 …GHASH_MUL_AVX2 reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks blocks
1857 vmovdqa \XMM8, \T3
2015 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX2 T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM…
2118 vpclmulqdq $0x11, \T5, \T1, \T3
2119 vpxor \T3, \T4, \T4
2121 vpclmulqdq $0x00, \T5, \T1, \T3
2122 vpxor \T3, \T7, \T7
2124 vpclmulqdq $0x01, \T5, \T1, \T3
2125 vpxor \T3, \T6, \T6
2127 vpclmulqdq $0x10, \T5, \T1, \T3
2128 vpxor \T3, \T6, \T6
2144 vpclmulqdq $0x11, \T5, \T1, \T3
2145 vpxor \T3, \T4, \T4
2147 vpclmulqdq $0x00, \T5, \T1, \T3
2148 vpxor \T3, \T7, \T7
2150 vpclmulqdq $0x01, \T5, \T1, \T3
2151 vpxor \T3, \T6, \T6
2153 vpclmulqdq $0x10, \T5, \T1, \T3
2154 vpxor \T3, \T6, \T6
2168 vpclmulqdq $0x11, \T5, \T1, \T3
2169 vpxor \T3, \T4, \T4
2171 vpclmulqdq $0x00, \T5, \T1, \T3
2172 vpxor \T3, \T7, \T7
2174 vpclmulqdq $0x01, \T5, \T1, \T3
2175 vpxor \T3, \T6, \T6
2177 vpclmulqdq $0x10, \T5, \T1, \T3
2178 vpxor \T3, \T6, \T6
2193 vpclmulqdq $0x11, \T5, \T1, \T3
2194 vpxor \T3, \T4, \T4
2196 vpclmulqdq $0x00, \T5, \T1, \T3
2197 vpxor \T3, \T7, \T7
2199 vpclmulqdq $0x01, \T5, \T1, \T3
2200 vpxor \T3, \T6, \T6
2202 vpclmulqdq $0x10, \T5, \T1, \T3
2203 vpxor \T3, \T6, \T6
2217 vpclmulqdq $0x11, \T5, \T1, \T3
2218 vpxor \T3, \T4, \T4
2220 vpclmulqdq $0x00, \T5, \T1, \T3
2221 vpxor \T3, \T7, \T7
2223 vpclmulqdq $0x01, \T5, \T1, \T3
2224 vpxor \T3, \T6, \T6
2226 vpclmulqdq $0x10, \T5, \T1, \T3
2227 vpxor \T3, \T6, \T6
2241 vpclmulqdq $0x11, \T5, \T1, \T3
2242 vpxor \T3, \T4, \T4
2244 vpclmulqdq $0x00, \T5, \T1, \T3
2245 vpxor \T3, \T7, \T7
2247 vpclmulqdq $0x01, \T5, \T1, \T3
2248 vpxor \T3, \T6, \T6
2250 vpclmulqdq $0x10, \T5, \T1, \T3
2251 vpxor \T3, \T6, \T6
2269 vpclmulqdq $0x00, \T5, \T1, \T3
2270 vpxor \T3, \T7, \T7
2272 vpclmulqdq $0x01, \T5, \T1, \T3
2273 vpxor \T3, \T6, \T6
2275 vpclmulqdq $0x10, \T5, \T1, \T3
2276 vpxor \T3, \T6, \T6
2278 vpclmulqdq $0x11, \T5, \T1, \T3
2279 vpxor \T3, \T4, \T1
2292 vaesenclast \T2, reg_j, \T3
2294 vmovdqu \T3, 16*i(arg2, %r11)
2303 vpslldq $8, \T6, \T3 # shift-L T3 2 DWs
2305 vpxor \T3, \T7, \T7
2312 vmovdqa POLY2(%rip), \T3
2314 vpclmulqdq $0x01, \T7, \T3, \T2
2332 vpclmulqdq $0x00, \T7, \T3, \T2
2335 vpclmulqdq $0x10, \T7, \T3, \T4
2360 .macro GHASH_LAST_8_AVX2 T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
2367 vpshufd $0b01001110, \T5, \T3
2369 vpxor \T5, \T3, \T3
2374 vpclmulqdq $0x00, \T3, \T2, \XMM1
2380 vpshufd $0b01001110, \T5, \T3
2382 vpxor \T5, \T3, \T3
2390 vpclmulqdq $0x00, \T3, \T2, \T2
2398 vpshufd $0b01001110, \T5, \T3
2400 vpxor \T5, \T3, \T3
2408 vpclmulqdq $0x00, \T3, \T2, \T2
2416 vpshufd $0b01001110, \T5, \T3
2418 vpxor \T5, \T3, \T3
2426 vpclmulqdq $0x00, \T3, \T2, \T2
2434 vpshufd $0b01001110, \T5, \T3
2436 vpxor \T5, \T3, \T3
2444 vpclmulqdq $0x00, \T3, \T2, \T2
2452 vpshufd $0b01001110, \T5, \T3
2454 vpxor \T5, \T3, \T3
2462 vpclmulqdq $0x00, \T3, \T2, \T2
2470 vpshufd $0b01001110, \T5, \T3
2472 vpxor \T5, \T3, \T3
2480 vpclmulqdq $0x00, \T3, \T2, \T2
2488 vpshufd $0b01001110, \T5, \T3
2490 vpxor \T5, \T3, \T3
2498 vpclmulqdq $0x00, \T3, \T2, \T2
2516 vmovdqa POLY2(%rip), \T3
2518 vpclmulqdq $0x01, \T7, \T3, \T2
2526 vpclmulqdq $0x00, \T7, \T3, \T2
2529 vpclmulqdq $0x10, \T7, \T3, \T4
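As a sanity check of the aggregation identity used throughout, the plain-C sketches above can be compared against the serial Horner form. The inputs below are arbitrary values, not kernel test vectors, and the helpers are the ones defined in the earlier sketches:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

// Verify: aggregated 8-block GHASH == serial Horner GHASH. Relies on
// gf128_mul(), ghash_powers() and ghash_8_aggregated() from the sketches
// above being present in the same translation unit.
int main(void)
{
    uint8_t H[16], X[8][16], Y_serial[16] = {0}, Y_agg[16] = {0};
    uint8_t hpow[8][16], t[16];

    for (int i = 0; i < 16; i++)
        H[i] = (uint8_t)(i * 37 + 1);
    for (int i = 0; i < 8; i++)
        for (int j = 0; j < 16; j++)
            X[i][j] = (uint8_t)(i * 16 + j + 3);

    for (int i = 0; i < 8; i++) {                // serial: Y = (Y ^ X_i) * H
        for (int j = 0; j < 16; j++)
            Y_serial[j] ^= X[i][j];
        gf128_mul(Y_serial, H, t);
        memcpy(Y_serial, t, 16);
    }

    ghash_powers(H, hpow);                       // aggregated form
    ghash_8_aggregated(Y_agg, X, hpow);

    assert(memcmp(Y_serial, Y_agg, 16) == 0);
    puts("aggregated GHASH matches serial GHASH");
    return 0;
}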