Lines Matching refs:_mm_clmulepi64_si128
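Every hit below uses the immediate 0x01 or 0x10 (plus a few 0 in the final reduction). The immediate is a qword selector: bit 0 picks the low or high 64 bits of the first operand, bit 4 does the same for the second, so 0x01 computes hi(a) clmul lo(b) and 0x10 computes lo(a) clmul hi(b). A minimal, self-contained demo of just the selector behavior (the operand values are made up for illustration):

    /* cc -O2 -mpclmul selector_demo.c; needs a PCLMULQDQ-capable CPU */
    #include <stdio.h>
    #include <immintrin.h>

    int main(void)
    {
        __m128i a = _mm_set_epi64x(0x8, 0x3);  /* hi(a) = 8, lo(a) = 3 */
        __m128i b = _mm_set_epi64x(0x5, 0x2);  /* hi(b) = 5, lo(b) = 2 */

        /* imm 0x01: hi(a) clmul lo(b) = 8 clmul 2 = 0x10 */
        __m128i p1 = _mm_clmulepi64_si128(a, b, 0x01);
        /* imm 0x10: lo(a) clmul hi(b) = 3 clmul 5 = 0xf */
        __m128i p2 = _mm_clmulepi64_si128(a, b, 0x10);

        printf("0x01 -> %llx\n", (unsigned long long)_mm_cvtsi128_si64(p1));
        printf("0x10 -> %llx\n", (unsigned long long)_mm_cvtsi128_si64(p2));
        return 0;
    }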
72 *xmm_crc0 = _mm_clmulepi64_si128(*xmm_crc0, xmm_fold4, 0x01); in fold_1()
73 *xmm_crc3 = _mm_clmulepi64_si128(*xmm_crc3, xmm_fold4, 0x10); in fold_1()
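In fold_1() that 0x01/0x10 pair is one 128-bit fold: both 64-bit halves of the oldest accumulator are carry-less-multiplied by the matching halves of the folding constant and XORed together, which advances the accumulator 4 x 128 = 512 bits through the message. A sketch of the primitive, assuming this listing is zlib's crc_folding.c (the xmm_fold4 constants below are zlib's and are an assumption here; fold_512 is a hypothetical name):

    #include <immintrin.h>

    /* Fold one accumulator forward by 512 bits (sketch). */
    static __m128i fold_512(__m128i x)
    {
        /* assumed: zlib's 4x128-bit fold constants */
        const __m128i xmm_fold4 = _mm_set_epi32(0x00000001, 0x54442bd4,
                                                0x00000001, 0xc6e41596);
        __m128i hi = _mm_clmulepi64_si128(x, xmm_fold4, 0x01); /* hi(x)*lo(k) */
        __m128i lo = _mm_clmulepi64_si128(x, xmm_fold4, 0x10); /* lo(x)*hi(k) */
        /* the real fold_1() performs this XOR through _mm_xor_ps casts */
        return _mm_xor_si128(hi, lo);
    }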
99 *xmm_crc1 = _mm_clmulepi64_si128(*xmm_crc1, xmm_fold4, 0x01); in fold_2()
100 *xmm_crc3 = _mm_clmulepi64_si128(*xmm_crc3, xmm_fold4, 0x10); in fold_2()
106 *xmm_crc0 = _mm_clmulepi64_si128(*xmm_crc0, xmm_fold4, 0x01); in fold_2()
107 *xmm_crc2 = _mm_clmulepi64_si128(*xmm_crc2, xmm_fold4, 0x10); in fold_2()
132 *xmm_crc2 = _mm_clmulepi64_si128(*xmm_crc2, xmm_fold4, 0x01); in fold_3()
133 *xmm_crc3 = _mm_clmulepi64_si128(*xmm_crc3, xmm_fold4, 0x10); in fold_3()
139 *xmm_crc1 = _mm_clmulepi64_si128(*xmm_crc1, xmm_fold4, 0x01); in fold_3()
140 *xmm_crc2 = _mm_clmulepi64_si128(*xmm_crc2, xmm_fold4, 0x10); in fold_3()
146 *xmm_crc0 = _mm_clmulepi64_si128(*xmm_crc0, xmm_fold4, 0x01); in fold_3()
147 *xmm_crc1 = _mm_clmulepi64_si128(*xmm_crc1, xmm_fold4, 0x10); in fold_3()
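fold_2() and fold_3() repeat the identical pair on two and three accumulators. The common shape of fold_1/fold_2/fold_3 is: fold the oldest n lanes forward by 512 bits, rotate the four-register file down by n, and park the folded results in the newest slots, after which the caller XORs in n x 16 fresh bytes. An illustrative generalization (the real code unrolls each case; fold_n is a hypothetical name):

    #include <immintrin.h>

    static __m128i fold_512(__m128i x)  /* same primitive as above */
    {
        const __m128i k = _mm_set_epi32(0x00000001, 0x54442bd4,
                                        0x00000001, 0xc6e41596);
        return _mm_xor_si128(_mm_clmulepi64_si128(x, k, 0x01),
                             _mm_clmulepi64_si128(x, k, 0x10));
    }

    /* What fold_1/fold_2/fold_3 do to the register file, for n = 1..3. */
    static void fold_n(__m128i crc[4], int n)
    {
        __m128i out[4];
        for (int i = 0; i < 4; i++)
            out[i] = (i < 4 - n)
                   ? crc[i + n]                   /* survivor, shifted down */
                   : fold_512(crc[i - (4 - n)]);  /* oldest lanes, folded */
        for (int i = 0; i < 4; i++)
            crc[i] = out[i];
    }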
176 *xmm_crc0 = _mm_clmulepi64_si128(*xmm_crc0, xmm_fold4, 0x01); in fold_4()
177 x_tmp0 = _mm_clmulepi64_si128(x_tmp0, xmm_fold4, 0x10); in fold_4()
182 *xmm_crc1 = _mm_clmulepi64_si128(*xmm_crc1, xmm_fold4, 0x01); in fold_4()
183 x_tmp1 = _mm_clmulepi64_si128(x_tmp1, xmm_fold4, 0x10); in fold_4()
188 *xmm_crc2 = _mm_clmulepi64_si128(*xmm_crc2, xmm_fold4, 0x01); in fold_4()
189 x_tmp2 = _mm_clmulepi64_si128(x_tmp2, xmm_fold4, 0x10); in fold_4()
194 *xmm_crc3 = _mm_clmulepi64_si128(*xmm_crc3, xmm_fold4, 0x01); in fold_4()
195 x_tmp3 = _mm_clmulepi64_si128(x_tmp3, xmm_fold4, 0x10); in fold_4()
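fold_4() is the steady state of the main loop: all four accumulators are folded forward at once and the next 64 input bytes are absorbed (in the real code the data XOR sits in the caller; fold_4() only folds). One loop step might look like this (fold_4_step is a hypothetical name):

    #include <immintrin.h>

    /* Advance all four accumulators by 512 bits, absorb 64 new bytes. */
    static void fold_4_step(__m128i crc[4], const unsigned char *buf)
    {
        const __m128i k = _mm_set_epi32(0x00000001, 0x54442bd4,
                                        0x00000001, 0xc6e41596);
        for (int i = 0; i < 4; i++) {
            __m128i hi = _mm_clmulepi64_si128(crc[i], k, 0x01);
            __m128i lo = _mm_clmulepi64_si128(crc[i], k, 0x10);
            __m128i d  = _mm_loadu_si128((const __m128i *)buf + i);
            crc[i] = _mm_xor_si128(_mm_xor_si128(hi, lo), d);
        }
    }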
261 xmm_a0_1 = _mm_clmulepi64_si128(xmm_a0_0, xmm_fold4, 0x10); in partial_fold()
262 xmm_a0_0 = _mm_clmulepi64_si128(xmm_a0_0, xmm_fold4, 0x01); in partial_fold()
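partial_fold() handles a tail of 1..15 bytes. Reading the surrounding zlib code, the accumulators are byte-shifted through a pshufb table, and the 0x10/0x01 pair at lines 261-262 applies the same fold primitive to the bytes shifted out of the oldest register (xmm_a0_0), so the tail is merged without falling back to a scalar CRC.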
445 x_tmp0 = _mm_clmulepi64_si128(xmm_crc0, crc_fold, 0x10); in crc_fold_512to32()
446 xmm_crc0 = _mm_clmulepi64_si128(xmm_crc0, crc_fold, 0x01); in crc_fold_512to32()
450 x_tmp1 = _mm_clmulepi64_si128(xmm_crc1, crc_fold, 0x10); in crc_fold_512to32()
451 xmm_crc1 = _mm_clmulepi64_si128(xmm_crc1, crc_fold, 0x01); in crc_fold_512to32()
455 x_tmp2 = _mm_clmulepi64_si128(xmm_crc2, crc_fold, 0x10); in crc_fold_512to32()
456 xmm_crc2 = _mm_clmulepi64_si128(xmm_crc2, crc_fold, 0x01); in crc_fold_512to32()
466 xmm_crc3 = _mm_clmulepi64_si128(xmm_crc3, crc_fold, 0); in crc_fold_512to32()
472 xmm_crc3 = _mm_clmulepi64_si128(xmm_crc3, crc_fold, 0x10); in crc_fold_512to32()
483 xmm_crc3 = _mm_clmulepi64_si128(xmm_crc3, crc_fold, 0); in crc_fold_512to32()
488 xmm_crc3 = _mm_clmulepi64_si128(xmm_crc3, crc_fold, 0x10); in crc_fold_512to32()
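crc_fold_512to32() is the final reduction chain. Lines 445-456 fold the four accumulators pairwise into a single 128-bit remainder, reusing the 0x10/0x01 pattern but with constants loaded from the file's crc_k table; lines 466 and 472 (imm 0, then 0x10, with the next constant pair) reduce that remainder to 64 bits; and the closing pair at lines 483 and 488 is the Barrett reduction that yields the 32-bit CRC. A sketch of the first stage only (fold_512to128 is a hypothetical name; k stands in for the loaded crc_k constants):

    #include <immintrin.h>

    /* Combine four folded accumulators into one 128-bit remainder. */
    static __m128i fold_512to128(__m128i c0, __m128i c1, __m128i c2,
                                 __m128i c3, __m128i k /* from crc_k */)
    {
        c1 = _mm_xor_si128(c1, _mm_clmulepi64_si128(c0, k, 0x10));
        c1 = _mm_xor_si128(c1, _mm_clmulepi64_si128(c0, k, 0x01));

        c2 = _mm_xor_si128(c2, _mm_clmulepi64_si128(c1, k, 0x10));
        c2 = _mm_xor_si128(c2, _mm_clmulepi64_si128(c1, k, 0x01));

        c3 = _mm_xor_si128(c3, _mm_clmulepi64_si128(c2, k, 0x10));
        c3 = _mm_xor_si128(c3, _mm_clmulepi64_si128(c2, k, 0x01));
        return c3;
    }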