Lines Matching full:rx0
21 #define RX0 %xmm0
190 vbroadcastss (4*(round))(%rdi), RX0; \
191 vpxor s1, RX0, RX0; \
192 vpxor s2, RX0, RX0; \
193 vpxor s3, RX0, RX0; /* s1 ^ s2 ^ s3 ^ rk */ \
196 transform_pre(RX0, RTMP4, RB0, MASK_4BIT, RTMP0); \
197 vaesenclast MASK_4BIT, RX0, RX0; \
198 transform_post(RX0, RB1, RB2, MASK_4BIT, RTMP0); \
201 vpshufb RB3, RX0, RTMP0; \
203 vpshufb RTMP2, RX0, RTMP1; \
205 vpshufb RTMP3, RX0, RTMP1; \
207 vpshufb .Linv_shift_row_rol_24 rRIP, RX0, RTMP1; \
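The matches above all sit in a single-lane round macro: the round key is broadcast from (%rdi) into RX0, XORed with state words s1 through s3, run through the AES-NI based S-box step (transform_pre, vaesenclast against MASK_4BIT, transform_post), and then fed to the byte-shuffle rotations that build SM4's linear transform. A minimal scalar sketch of the same round follows, assuming the standard SM4 round function; sm4_sbox, rol32, sm4_t and sm4_round are illustrative names rather than identifiers from this file, and the S-box table is assumed to be supplied elsewhere.

/* Hypothetical scalar reference for the round the matched lines vectorize.
 * Not taken from the assembly file; names and layout are illustrative. */
#include <stdint.h>

/* Standard SM4 S-box (256 entries), assumed to be defined elsewhere. */
extern const uint8_t sm4_sbox[256];

/* Rotate left by n bits, n in 1..31. */
static inline uint32_t rol32(uint32_t v, unsigned int n)
{
        return (v << n) | (v >> (32 - n));
}

/* T = L(tau(x)): tau mirrors transform_pre/vaesenclast/transform_post,
 * L mirrors the vpshufb byte rotations plus the 2-bit rotate. */
static uint32_t sm4_t(uint32_t x)
{
        x = (uint32_t)sm4_sbox[x & 0xff]
          | (uint32_t)sm4_sbox[(x >> 8) & 0xff] << 8
          | (uint32_t)sm4_sbox[(x >> 16) & 0xff] << 16
          | (uint32_t)sm4_sbox[(x >> 24) & 0xff] << 24;
        return x ^ rol32(x, 2) ^ rol32(x, 10) ^ rol32(x, 18) ^ rol32(x, 24);
}

/* One round: s0 ^= T(s1 ^ s2 ^ s3 ^ rk), as the vbroadcastss/vpxor lines set up. */
static void sm4_round(uint32_t s[4], uint32_t rk)
{
        s[0] ^= sm4_t(s[1] ^ s[2] ^ s[3] ^ rk);
}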
279 vbroadcastss (4*(round))(%rdi), RX0; \
282 vmovdqa RX0, RX1; \
283 vpxor s1, RX0, RX0; \
284 vpxor s2, RX0, RX0; \
285 vpxor s3, RX0, RX0; /* s1 ^ s2 ^ s3 ^ rk */ \
293 transform_pre(RX0, RTMP4, RTMP1, MASK_4BIT, RTMP0); \
296 vaesenclast MASK_4BIT, RX0, RX0; \
298 transform_post(RX0, RTMP2, RTMP3, MASK_4BIT, RTMP0); \
302 vpshufb RTMP4, RX0, RTMP0; \
307 vpshufb RTMP4, RX0, RTMP1; \
312 vpshufb RTMP4, RX0, RTMP1; \
317 vpshufb RTMP4, RX0, RTMP1; \
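The remaining matches come from a wider round macro that keeps two lanes in flight: the broadcast round key in RX0 is copied into RX1 (vmovdqa RX0, RX1) so both four-block groups consume the same key word, and RTMP4 serves as the shuffle-mask register for the vpshufb steps (the intervening mask loads do not contain RX0, so they are not listed). A hedged sketch of that two-lane structure, reusing sm4_t from the sketch above; sm4_round_x2 is an illustrative name:

/* Hypothetical two-lane round: one round key word shared by both state
 * groups, mirroring the RX0 -> RX1 copy in the second macro. */
static void sm4_round_x2(uint32_t a[4], uint32_t b[4], uint32_t rk)
{
        a[0] ^= sm4_t(a[1] ^ a[2] ^ a[3] ^ rk);
        b[0] ^= sm4_t(b[1] ^ b[2] ^ b[3] ^ rk);
}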