Lines matching refs:RTMP2
26 #define RTMP2 %xmm5
173 vmovdqa .Lbswap32_mask rRIP, RTMP2;
174 vpshufb RTMP2, RA0, RA0;
175 vpshufb RTMP2, RA1, RA1;
176 vpshufb RTMP2, RA2, RA2;
177 vpshufb RTMP2, RA3, RA3;
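Lines 173-177 convert the four input blocks from SM4's big-endian word order into the CPU's native little-endian order: the .Lbswap32_mask constant drives vpshufb to byte-reverse every 32-bit lane. A minimal C sketch of the same shuffle with SSE intrinsics (function name illustrative; only the mask layout is implied by the listing):

    #include <tmmintrin.h>  /* SSSE3: _mm_shuffle_epi8 */

    /* Byte-reverse each 32-bit lane, as vpshufb with a bswap32 mask does.
     * Result byte i takes source byte mask[i]. */
    static __m128i bswap32_sse(__m128i x)
    {
        const __m128i mask = _mm_set_epi8(12, 13, 14, 15,  8,  9, 10, 11,
                                           4,  5,  6,  7,  0,  1,  2,  3);
        return _mm_shuffle_epi8(x, mask);
    }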
185 vmovdqa .Linv_shift_row_rol_8 rRIP, RTMP2;
203 vpshufb RTMP2, RX0, RTMP1; \
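Judging by its name, .Linv_shift_row_rol_8 composes two byte permutations into a single vpshufb: the inverse of AES ShiftRows (undoing the row shift that AESENCLAST bakes in around the AES S-box this SM4 code reuses) and a rotate-left-by-8 of each 32-bit word. Byte shuffles compose for free; a hedged C sketch of how such a combined mask can be derived (helper name illustrative):

    /* If y[i] = x[p1[i]] and z[i] = y[p2[i]], then z[i] = x[p1[p2[i]]],
     * so one pshufb mask p1[p2[i]] replaces the two-shuffle sequence. */
    static void compose_shuffle(const unsigned char p1[16],
                                const unsigned char p2[16],
                                unsigned char combined[16])
    {
        for (int i = 0; i < 16; i++)
            combined[i] = p1[p2[i]];
    }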
228 vmovdqa .Lbswap128_mask rRIP, RTMP2;
231 vpshufb RTMP2, RA0, RA0;
232 vpshufb RTMP2, RA1, RA1;
233 vpshufb RTMP2, RA2, RA2;
234 vpshufb RTMP2, RA3, RA3;
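Lines 228-234 restore the output byte order: .Lbswap128_mask reverses all 16 bytes of each block in one shuffle, the full-width counterpart of the per-lane swap above. Sketch of the equivalent intrinsic (name illustrative):

    #include <tmmintrin.h>

    /* Reverse all 16 bytes of a 128-bit block. */
    static __m128i bswap128_sse(__m128i x)
    {
        const __m128i mask = _mm_set_epi8( 0,  1,  2,  3,  4,  5,  6,  7,
                                           8,  9, 10, 11, 12, 13, 14, 15);
        return _mm_shuffle_epi8(x, mask);
    }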
264 vmovdqa .Lbswap32_mask rRIP, RTMP2;
265 vpshufb RTMP2, RA0, RA0;
266 vpshufb RTMP2, RA1, RA1;
267 vpshufb RTMP2, RA2, RA2;
268 vpshufb RTMP2, RA3, RA3;
269 vpshufb RTMP2, RB0, RB0;
270 vpshufb RTMP2, RB1, RB1;
271 vpshufb RTMP2, RB2, RB2;
272 vpshufb RTMP2, RB3, RB3;
286 vmovdqa .Lpost_tf_lo_s rRIP, RTMP2; \
298 transform_post(RX0, RTMP2, RTMP3, MASK_4BIT, RTMP0); \
299 transform_post(RX1, RTMP2, RTMP3, MASK_4BIT, RTMP0); \
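Judging by the operand names (.Lpost_tf_lo_s, MASK_4BIT), transform_post appears to be the standard pshufb nibble-table trick: each byte of the AESENCLAST result is split into its low and high 4-bit halves, each half indexes a 16-entry table, and the two lookups are XORed to apply the affine mapping from the AES S-box domain back to SM4's. A minimal sketch, assuming tf_lo/tf_hi hold the two 16-byte tables (names illustrative):

    #include <tmmintrin.h>

    /* out_byte = tf_lo[b & 0xf] ^ tf_hi[b >> 4] for every byte b. */
    static __m128i transform_post_sse(__m128i x, __m128i tf_lo, __m128i tf_hi)
    {
        const __m128i mask4 = _mm_set1_epi8(0x0f);
        __m128i lo = _mm_and_si128(x, mask4);
        __m128i hi = _mm_and_si128(_mm_srli_epi16(x, 4), mask4);
        return _mm_xor_si128(_mm_shuffle_epi8(tf_lo, lo),
                             _mm_shuffle_epi8(tf_hi, hi));
    }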
304 vpshufb RTMP4, RX1, RTMP2; \
306 vpxor RTMP2, r0, r0; /* r0 ^ x */ \
311 vpxor RTMP3, RTMP2, RTMP2; /* x ^ rol(x,8) */ \
316 vpxor RTMP3, RTMP2, RTMP2; /* x ^ rol(x,8) ^ rol(x,16) */ \
327 vpslld $2, RTMP2, RTMP3; \
328 vpsrld $30, RTMP2, RTMP2; \
329 vpxor RTMP2, r0, r0; \
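Lines 304-329 carry out SM4's linear transformation L(B) = B ^ (B <<< 2) ^ (B <<< 10) ^ (B <<< 18) ^ (B <<< 24). The 8- and 16-bit rotations come cheaply as vpshufb byte rotations, and the identity (B ^ (B <<< 8) ^ (B <<< 16)) <<< 2 = (B <<< 2) ^ (B <<< 10) ^ (B <<< 18) lets the single vpslld $2 / vpsrld $30 pair cover three terms at once. A scalar C reference of both forms:

    #include <stdint.h>

    static inline uint32_t rol32(uint32_t x, unsigned n)
    {
        return (x << n) | (x >> (32 - n));
    }

    /* SM4 linear transformation, direct form. */
    static uint32_t sm4_L(uint32_t b)
    {
        return b ^ rol32(b, 2) ^ rol32(b, 10) ^ rol32(b, 18) ^ rol32(b, 24);
    }

    /* Factored form matching the vector code's structure. */
    static uint32_t sm4_L_factored(uint32_t b)
    {
        uint32_t t = b ^ rol32(b, 8) ^ rol32(b, 16);
        return b ^ rol32(t, 2) ^ rol32(b, 24);
    }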
345 vmovdqa .Lbswap128_mask rRIP, RTMP2;
349 vpshufb RTMP2, RA0, RA0;
350 vpshufb RTMP2, RA1, RA1;
351 vpshufb RTMP2, RA2, RA2;
352 vpshufb RTMP2, RA3, RA3;
353 vpshufb RTMP2, RB0, RB0;
354 vpshufb RTMP2, RB1, RB1;
355 vpshufb RTMP2, RB2, RB2;
356 vpshufb RTMP2, RB3, RB3;
448 inc_le128(RTMP0, RNOT, RTMP2); /* +1 */
450 inc_le128(RTMP0, RNOT, RTMP2); /* +2 */
452 inc_le128(RTMP0, RNOT, RTMP2); /* +3 */
454 inc_le128(RTMP0, RNOT, RTMP2); /* +4 */
456 inc_le128(RTMP0, RNOT, RTMP2); /* +5 */
458 inc_le128(RTMP0, RNOT, RTMP2); /* +6 */
460 inc_le128(RTMP0, RNOT, RTMP2); /* +7 */
462 inc_le128(RTMP0, RNOT, RTMP2); /* +8 */
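The CTR path (lines 448-462) works on the counter in little-endian lane order, as the _le_ suffix of inc_le128 suggests, so it can be incremented with ordinary SIMD arithmetic; judging by the operands, RNOT supplies an all-ones (-1) constant and RTMP2 is the scratch used to detect and propagate the carry out of the low 64-bit half, since SSE/AVX has no 128-bit add. A scalar sketch of the same increment:

    #include <stdint.h>

    /* Increment a 128-bit counter held as two little-endian 64-bit
     * halves, carrying from the low half into the high half. */
    static void inc_le128_scalar(uint64_t ctr[2])
    {
        ctr[0] += 1;
        if (ctr[0] == 0)  /* low half wrapped */
            ctr[1] += 1;
    }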