// This file is generated from a similarly-named Perl script in the BoringSSL
// source tree. Do not edit by hand.

#if defined(__has_feature)
#if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
#define OPENSSL_NO_ASM
#endif
#endif

#if !defined(OPENSSL_NO_ASM) && defined(__i386__) && defined(__ELF__)
#if defined(BORINGSSL_PREFIX)
#include <boringssl_prefix_symbols_asm.h>
#endif
.text
.globl gcm_init_clmul
.hidden gcm_init_clmul
.type gcm_init_clmul,@function
.align 16
gcm_init_clmul:
.L_gcm_init_clmul_begin:
 movl 4(%esp),%edx
 movl 8(%esp),%eax
 call .L000pic
.L000pic:
 popl %ecx
 leal .Lbswap-.L000pic(%ecx),%ecx
 movdqu (%eax),%xmm2
 pshufd $78,%xmm2,%xmm2
 pshufd $255,%xmm2,%xmm4
 movdqa %xmm2,%xmm3
 psllq $1,%xmm2
 pxor %xmm5,%xmm5
 psrlq $63,%xmm3
 pcmpgtd %xmm4,%xmm5
 pslldq $8,%xmm3
 por %xmm3,%xmm2
 pand 16(%ecx),%xmm5
 pxor %xmm5,%xmm2
 movdqa %xmm2,%xmm0
 movdqa %xmm0,%xmm1
 pshufd $78,%xmm0,%xmm3
 pshufd $78,%xmm2,%xmm4
 pxor %xmm0,%xmm3
 pxor %xmm2,%xmm4
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
.byte 102,15,58,68,220,0
 xorps %xmm0,%xmm3
 xorps %xmm1,%xmm3
 movdqa %xmm3,%xmm4
 psrldq $8,%xmm3
 pslldq $8,%xmm4
 pxor %xmm3,%xmm1
 pxor %xmm4,%xmm0
 movdqa %xmm0,%xmm4
 movdqa %xmm0,%xmm3
 psllq $5,%xmm0
 pxor %xmm0,%xmm3
 psllq $1,%xmm0
 pxor %xmm3,%xmm0
 psllq $57,%xmm0
 movdqa %xmm0,%xmm3
 pslldq $8,%xmm0
 psrldq $8,%xmm3
 pxor %xmm4,%xmm0
 pxor %xmm3,%xmm1
 movdqa %xmm0,%xmm4
 psrlq $1,%xmm0
 pxor %xmm4,%xmm1
 pxor %xmm0,%xmm4
 psrlq $5,%xmm0
 pxor %xmm4,%xmm0
 psrlq $1,%xmm0
 pxor %xmm1,%xmm0
 pshufd $78,%xmm2,%xmm3
 pshufd $78,%xmm0,%xmm4
 pxor %xmm2,%xmm3
 movdqu %xmm2,(%edx)
 pxor %xmm0,%xmm4
 movdqu %xmm0,16(%edx)
.byte 102,15,58,15,227,8
 movdqu %xmm4,32(%edx)
 ret
.size gcm_init_clmul,.-.L_gcm_init_clmul_begin
.globl gcm_gmult_clmul
.hidden gcm_gmult_clmul
.type gcm_gmult_clmul,@function
.align 16
gcm_gmult_clmul:
.L_gcm_gmult_clmul_begin:
 movl 4(%esp),%eax
 movl 8(%esp),%edx
 call .L001pic
.L001pic:
 popl %ecx
 leal .Lbswap-.L001pic(%ecx),%ecx
 movdqu (%eax),%xmm0
 movdqa (%ecx),%xmm5
 movups (%edx),%xmm2
.byte 102,15,56,0,197
 movups 32(%edx),%xmm4
 movdqa %xmm0,%xmm1
 pshufd $78,%xmm0,%xmm3
 pxor %xmm0,%xmm3
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
.byte 102,15,58,68,220,0
 xorps %xmm0,%xmm3
 xorps %xmm1,%xmm3
 movdqa %xmm3,%xmm4
 psrldq $8,%xmm3
 pslldq $8,%xmm4
 pxor %xmm3,%xmm1
 pxor %xmm4,%xmm0
 movdqa %xmm0,%xmm4
 movdqa %xmm0,%xmm3
 psllq $5,%xmm0
 pxor %xmm0,%xmm3
 psllq $1,%xmm0
 pxor %xmm3,%xmm0
 psllq $57,%xmm0
 movdqa %xmm0,%xmm3
 pslldq $8,%xmm0
 psrldq $8,%xmm3
 pxor %xmm4,%xmm0
 pxor %xmm3,%xmm1
 movdqa %xmm0,%xmm4
 psrlq $1,%xmm0
 pxor %xmm4,%xmm1
 pxor %xmm0,%xmm4
 psrlq $5,%xmm0
 pxor %xmm4,%xmm0
 psrlq $1,%xmm0
 pxor %xmm1,%xmm0
.byte 102,15,56,0,197
 movdqu %xmm0,(%eax)
 ret
.size gcm_gmult_clmul,.-.L_gcm_gmult_clmul_begin
.globl gcm_ghash_clmul
.hidden gcm_ghash_clmul
.type gcm_ghash_clmul,@function
.align 16
gcm_ghash_clmul:
.L_gcm_ghash_clmul_begin:
 pushl %ebp
 pushl %ebx
 pushl %esi
 pushl %edi
 movl 20(%esp),%eax
 movl 24(%esp),%edx
 movl 28(%esp),%esi
 movl 32(%esp),%ebx
 call .L002pic
.L002pic:
 popl %ecx
 leal .Lbswap-.L002pic(%ecx),%ecx
 movdqu (%eax),%xmm0
 movdqa (%ecx),%xmm5
 movdqu (%edx),%xmm2
.byte 102,15,56,0,197
 subl $16,%ebx
 jz .L003odd_tail
 movdqu (%esi),%xmm3
 movdqu 16(%esi),%xmm6
.byte 102,15,56,0,221
.byte 102,15,56,0,245
 movdqu 32(%edx),%xmm5
 pxor %xmm3,%xmm0
 pshufd $78,%xmm6,%xmm3
 movdqa %xmm6,%xmm7
 pxor %xmm6,%xmm3
 leal 32(%esi),%esi
.byte 102,15,58,68,242,0
.byte 102,15,58,68,250,17
.byte 102,15,58,68,221,0
 movups 16(%edx),%xmm2
 nop
 subl $32,%ebx
 jbe .L004even_tail
 jmp .L005mod_loop
.align 32
.L005mod_loop:
 pshufd $78,%xmm0,%xmm4
 movdqa %xmm0,%xmm1
 pxor %xmm0,%xmm4
 nop
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
.byte 102,15,58,68,229,16
 movups (%edx),%xmm2
 xorps %xmm6,%xmm0
 movdqa (%ecx),%xmm5
 xorps %xmm7,%xmm1
 movdqu (%esi),%xmm7
 pxor %xmm0,%xmm3
 movdqu 16(%esi),%xmm6
 pxor %xmm1,%xmm3
.byte 102,15,56,0,253
 pxor %xmm3,%xmm4
 movdqa %xmm4,%xmm3
 psrldq $8,%xmm4
 pslldq $8,%xmm3
 pxor %xmm4,%xmm1
 pxor %xmm3,%xmm0
.byte 102,15,56,0,245
 pxor %xmm7,%xmm1
 movdqa %xmm6,%xmm7
 movdqa %xmm0,%xmm4
 movdqa %xmm0,%xmm3
 psllq $5,%xmm0
 pxor %xmm0,%xmm3
 psllq $1,%xmm0
 pxor %xmm3,%xmm0
.byte 102,15,58,68,242,0
 movups 32(%edx),%xmm5
 psllq $57,%xmm0
 movdqa %xmm0,%xmm3
 pslldq $8,%xmm0
 psrldq $8,%xmm3
 pxor %xmm4,%xmm0
 pxor %xmm3,%xmm1
 pshufd $78,%xmm7,%xmm3
 movdqa %xmm0,%xmm4
 psrlq $1,%xmm0
 pxor %xmm7,%xmm3
 pxor %xmm4,%xmm1
.byte 102,15,58,68,250,17
 movups 16(%edx),%xmm2
 pxor %xmm0,%xmm4
 psrlq $5,%xmm0
 pxor %xmm4,%xmm0
 psrlq $1,%xmm0
 pxor %xmm1,%xmm0
.byte 102,15,58,68,221,0
 leal 32(%esi),%esi
 subl $32,%ebx
 ja .L005mod_loop
.L004even_tail:
 pshufd $78,%xmm0,%xmm4
 movdqa %xmm0,%xmm1
 pxor %xmm0,%xmm4
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
.byte 102,15,58,68,229,16
 movdqa (%ecx),%xmm5
 xorps %xmm6,%xmm0
 xorps %xmm7,%xmm1
 pxor %xmm0,%xmm3
 pxor %xmm1,%xmm3
 pxor %xmm3,%xmm4
 movdqa %xmm4,%xmm3
 psrldq $8,%xmm4
 pslldq $8,%xmm3
 pxor %xmm4,%xmm1
 pxor %xmm3,%xmm0
 movdqa %xmm0,%xmm4
 movdqa %xmm0,%xmm3
 psllq $5,%xmm0
 pxor %xmm0,%xmm3
 psllq $1,%xmm0
 pxor %xmm3,%xmm0
 psllq $57,%xmm0
 movdqa %xmm0,%xmm3
 pslldq $8,%xmm0
 psrldq $8,%xmm3
 pxor %xmm4,%xmm0
 pxor %xmm3,%xmm1
 movdqa %xmm0,%xmm4
 psrlq $1,%xmm0
 pxor %xmm4,%xmm1
 pxor %xmm0,%xmm4
 psrlq $5,%xmm0
 pxor %xmm4,%xmm0
 psrlq $1,%xmm0
 pxor %xmm1,%xmm0
 testl %ebx,%ebx
 jnz .L006done
 movups (%edx),%xmm2
.L003odd_tail:
 movdqu (%esi),%xmm3
.byte 102,15,56,0,221
 pxor %xmm3,%xmm0
 movdqa %xmm0,%xmm1
 pshufd $78,%xmm0,%xmm3
 pshufd $78,%xmm2,%xmm4
 pxor %xmm0,%xmm3
 pxor %xmm2,%xmm4
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
.byte 102,15,58,68,220,0
 xorps %xmm0,%xmm3
 xorps %xmm1,%xmm3
 movdqa %xmm3,%xmm4
 psrldq $8,%xmm3
 pslldq $8,%xmm4
 pxor %xmm3,%xmm1
 pxor %xmm4,%xmm0
 movdqa %xmm0,%xmm4
 movdqa %xmm0,%xmm3
 psllq $5,%xmm0
 pxor %xmm0,%xmm3
 psllq $1,%xmm0
 pxor %xmm3,%xmm0
 psllq $57,%xmm0
 movdqa %xmm0,%xmm3
 pslldq $8,%xmm0
 psrldq $8,%xmm3
 pxor %xmm4,%xmm0
 pxor %xmm3,%xmm1
 movdqa %xmm0,%xmm4
 psrlq $1,%xmm0
 pxor %xmm4,%xmm1
 pxor %xmm0,%xmm4
 psrlq $5,%xmm0
 pxor %xmm4,%xmm0
 psrlq $1,%xmm0
 pxor %xmm1,%xmm0
.L006done:
.byte 102,15,56,0,197
 movdqu %xmm0,(%eax)
 popl %edi
 popl %esi
 popl %ebx
 popl %ebp
 ret
.size gcm_ghash_clmul,.-.L_gcm_ghash_clmul_begin
.align 64
.Lbswap:
.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
.byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,194
.byte 71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67
.byte 82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112
.byte 112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62
.byte 0
#endif  // !defined(OPENSSL_NO_ASM) && defined(__i386__) && defined(__ELF__)
#if defined(__ELF__)
// See https://www.airs.com/blog/archives/518.
.section .note.GNU-stack,"",%progbits
#endif