/* 8-way AVX helper macros for block cipher modes: bulk load, CBC, CTR, XTS */
/* load 8 consecutive 16-byte blocks from src into registers x0..x7 */
#define load_8way(src, x0, x1, x2, x3, x4, x5, x6, x7) \
	vmovdqu (0*16)(src), x0; \
	vmovdqu (1*16)(src), x1; \
	vmovdqu (2*16)(src), x2; \
	vmovdqu (3*16)(src), x3; \
	vmovdqu (4*16)(src), x4; \
	vmovdqu (5*16)(src), x5; \
	vmovdqu (6*16)(src), x6; \
	vmovdqu (7*16)(src), x7;
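
For readers who think in intrinsics rather than AT&T assembly, `vmovdqu` is an unaligned 16-byte vector load, i.e. `_mm_loadu_si128` in intrinsics form. A minimal C sketch of the same operation follows; the function name `load_8way_c` and the array-of-registers layout are illustrative, not part of the original file.

```c
#include <emmintrin.h> /* SSE2: __m128i, _mm_loadu_si128 */

/* Illustrative C model of load_8way: read eight unaligned
 * 16-byte blocks from src into eight vector values. */
static inline void load_8way_c(const unsigned char *src, __m128i x[8])
{
	for (int i = 0; i < 8; i++)
		x[i] = _mm_loadu_si128((const __m128i *)(src + i * 16));
}
```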
/* CBC decryption chaining: XOR each decrypted block with the previous
 * ciphertext block still sitting at src, then store all 8 results via
 * the companion store_8way macro. x0 (the first block) is left alone;
 * the caller XORs it with the IV. */
#define store_cbc_8way(src, dst, x0, x1, x2, x3, x4, x5, x6, x7) \
	vpxor (0*16)(src), x1, x1; \
	vpxor (1*16)(src), x2, x2; \
	vpxor (2*16)(src), x3, x3; \
	vpxor (3*16)(src), x4, x4; \
	vpxor (4*16)(src), x5, x5; \
	vpxor (5*16)(src), x6, x6; \
	vpxor (6*16)(src), x7, x7; \
	store_8way(dst, x0, x1, x2, x3, x4, x5, x6, x7);
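
In C terms the chaining step works like this: after the cipher has decrypted eight ciphertext blocks, plaintext i is recovered by XORing it with ciphertext i-1 (the IV standing in for "block -1", handled by the caller). A minimal sketch, with hypothetical names:

```c
#include <string.h>

/* Illustrative model of the CBC store step: ct points at the eight
 * ciphertext blocks still in memory, x holds the raw decryptions.
 * x[0] is assumed to have been XORed with the IV already. */
static void store_cbc_8way_c(const unsigned char *ct, unsigned char *dst,
			     unsigned char x[8][16])
{
	for (int i = 1; i < 8; i++)
		for (int j = 0; j < 16; j++)
			x[i][j] ^= ct[(i - 1) * 16 + j];
	memcpy(dst, x, 8 * 16); /* write out all 8 plaintext blocks */
}
```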
/* CTR mode: x0..x7 hold the encrypted counter blocks (the keystream);
 * XOR them with the input at src and store the results via store_8way.
 * Encryption and decryption are the same operation in CTR. */
#define store_ctr_8way(src, dst, x0, x1, x2, x3, x4, x5, x6, x7) \
	vpxor (0*16)(src), x0, x0; \
	vpxor (1*16)(src), x1, x1; \
	vpxor (2*16)(src), x2, x2; \
	vpxor (3*16)(src), x3, x3; \
	vpxor (4*16)(src), x4, x4; \
	vpxor (5*16)(src), x5, x5; \
	vpxor (6*16)(src), x6, x6; \
	vpxor (7*16)(src), x7, x7; \
	store_8way(dst, x0, x1, x2, x3, x4, x5, x6, x7);
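
The CTR step is simpler than CBC: there is no chaining, only a XOR of the keystream against the input. A minimal C sketch under the same illustrative naming:

```c
#include <string.h>

/* Illustrative model of the CTR store step: x holds the eight
 * encrypted counter blocks (the keystream); XORing them with the
 * input at src yields the output, identically for both directions. */
static void store_ctr_8way_c(const unsigned char *src, unsigned char *dst,
			     unsigned char x[8][16])
{
	for (int i = 0; i < 8; i++)
		for (int j = 0; j < 16; j++)
			x[i][j] ^= src[i * 16 + j];
	memcpy(dst, x, 8 * 16);
}
```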
/* XTS: load 8 blocks from src, XORing each with its per-block tweak
 * (pre-whitening). tiv holds the running tweak, seeded from iv;
 * between the vpxor lines below the macro also stores tiv to dst and
 * advances it with gf128mul_x_ble(tiv, t0, t1). Those tweak-handling
 * lines do not reference src, so this listing elides them. */
#define load_xts_8way(iv, src, dst, x0, x1, x2, x3, x4, x5, x6, x7, tiv, t0, \
		      t1, xts_gf128mul_and_shl1_mask) \
	/* tweak setup and per-block tweak updates elided */ \
	vpxor (0*16)(src), tiv, x0; \
	vpxor (1*16)(src), tiv, x1; \
	vpxor (2*16)(src), tiv, x2; \
	vpxor (3*16)(src), tiv, x3; \
	vpxor (4*16)(src), tiv, x4; \
	vpxor (5*16)(src), tiv, x5; \
	vpxor (6*16)(src), tiv, x6; \
	vpxor (7*16)(src), tiv, x7; \
	/* final tweak store to dst elided */
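
The XTS tweak schedule is worth spelling out: block i gets tweak T·x^i, where multiplying by x in GF(2^128) is a left shift of the 128-bit little-endian value with the carried-out bit folded back in via the reduction constant 0x87. A minimal C sketch of the load step follows; the `_c` names are hypothetical and the tweak-store ordering is a plausible model, not a line-for-line transcription of the macro.

```c
#include <stdint.h>
#include <string.h>

/* Multiply the XTS tweak by x in GF(2^128), little-endian block
 * convention: shift the 128-bit value left by one bit and fold the
 * carried-out top bit back in with the constant 0x87. */
static void gf128mul_x_ble_c(uint8_t t[16])
{
	uint8_t carry = t[15] >> 7; /* bit shifted out of the top */
	for (int i = 15; i > 0; i--)
		t[i] = (uint8_t)((t[i] << 1) | (t[i - 1] >> 7));
	t[0] = (uint8_t)((t[0] << 1) ^ (carry ? 0x87 : 0x00));
}

/* Illustrative model of load_xts_8way: derive 8 consecutive tweaks
 * from the IV, XOR each input block with its tweak, and stash the
 * tweaks at dst so the store step can reuse them for post-whitening. */
static void load_xts_8way_c(const uint8_t iv[16], const uint8_t *src,
			    uint8_t *dst, uint8_t x[8][16])
{
	uint8_t tiv[16];

	memcpy(tiv, iv, 16); /* first tweak comes straight from the IV */
	for (int i = 0; i < 8; i++) {
		for (int j = 0; j < 16; j++)
			x[i][j] = src[i * 16 + j] ^ tiv[j];
		memcpy(dst + i * 16, tiv, 16); /* keep tweak for store step */
		gf128mul_x_ble_c(tiv); /* advance to the next tweak */
	}
}
```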