/external/XNNPACK/src/f32-velu/gen/ |
D | velu-avx-rr2-lut16-p3-x40.c |
     71  const uint64_t vidx0_lh = (uint64_t) _mm_extract_epi64(vidx0_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
     73  const uint64_t vidx0_hh = (uint64_t) _mm_extract_epi64(vidx0_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
    100  const uint64_t vidx1_lh = (uint64_t) _mm_extract_epi64(vidx1_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
    102  const uint64_t vidx1_hh = (uint64_t) _mm_extract_epi64(vidx1_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
    129  const uint64_t vidx2_lh = (uint64_t) _mm_extract_epi64(vidx2_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
    131  const uint64_t vidx2_hh = (uint64_t) _mm_extract_epi64(vidx2_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
    158  const uint64_t vidx3_lh = (uint64_t) _mm_extract_epi64(vidx3_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
    160  const uint64_t vidx3_hh = (uint64_t) _mm_extract_epi64(vidx3_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
    187  const uint64_t vidx4_lh = (uint64_t) _mm_extract_epi64(vidx4_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
    189  const uint64_t vidx4_hh = (uint64_t) _mm_extract_epi64(vidx4_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
    [all …]
|
D | velu-avx-rr2-lut16-p3-x32.c |
     68  const uint64_t vidx0_lh = (uint64_t) _mm_extract_epi64(vidx0_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
     70  const uint64_t vidx0_hh = (uint64_t) _mm_extract_epi64(vidx0_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
     97  const uint64_t vidx1_lh = (uint64_t) _mm_extract_epi64(vidx1_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
     99  const uint64_t vidx1_hh = (uint64_t) _mm_extract_epi64(vidx1_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
    126  const uint64_t vidx2_lh = (uint64_t) _mm_extract_epi64(vidx2_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
    128  const uint64_t vidx2_hh = (uint64_t) _mm_extract_epi64(vidx2_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
    155  const uint64_t vidx3_lh = (uint64_t) _mm_extract_epi64(vidx3_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
    157  const uint64_t vidx3_hh = (uint64_t) _mm_extract_epi64(vidx3_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
    272  const uint64_t vidx_lh = (uint64_t) _mm_extract_epi64(vidx_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
    274  const uint64_t vidx_hh = (uint64_t) _mm_extract_epi64(vidx_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
    [all …]
|
D | velu-avx-rr2-lut16-p3-x48.c |
     74  const uint64_t vidx0_lh = (uint64_t) _mm_extract_epi64(vidx0_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
     76  const uint64_t vidx0_hh = (uint64_t) _mm_extract_epi64(vidx0_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
    103  const uint64_t vidx1_lh = (uint64_t) _mm_extract_epi64(vidx1_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
    105  const uint64_t vidx1_hh = (uint64_t) _mm_extract_epi64(vidx1_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
    132  const uint64_t vidx2_lh = (uint64_t) _mm_extract_epi64(vidx2_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
    134  const uint64_t vidx2_hh = (uint64_t) _mm_extract_epi64(vidx2_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
    161  const uint64_t vidx3_lh = (uint64_t) _mm_extract_epi64(vidx3_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
    163  const uint64_t vidx3_hh = (uint64_t) _mm_extract_epi64(vidx3_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
    190  const uint64_t vidx4_lh = (uint64_t) _mm_extract_epi64(vidx4_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
    192  const uint64_t vidx4_hh = (uint64_t) _mm_extract_epi64(vidx4_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
    [all …]
|
D | velu-avx-rr2-lut16-p3-x24.c |
     65  const uint64_t vidx0_lh = (uint64_t) _mm_extract_epi64(vidx0_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x24()
     67  const uint64_t vidx0_hh = (uint64_t) _mm_extract_epi64(vidx0_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x24()
     94  const uint64_t vidx1_lh = (uint64_t) _mm_extract_epi64(vidx1_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x24()
     96  const uint64_t vidx1_hh = (uint64_t) _mm_extract_epi64(vidx1_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x24()
    123  const uint64_t vidx2_lh = (uint64_t) _mm_extract_epi64(vidx2_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x24()
    125  const uint64_t vidx2_hh = (uint64_t) _mm_extract_epi64(vidx2_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x24()
    223  const uint64_t vidx_lh = (uint64_t) _mm_extract_epi64(vidx_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x24()
    225  const uint64_t vidx_hh = (uint64_t) _mm_extract_epi64(vidx_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x24()
    289  const uint64_t vidx_lh = (uint64_t) _mm_extract_epi64(vidx_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x24()
    291  const uint64_t vidx_hh = (uint64_t) _mm_extract_epi64(vidx_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x24()
|
D | velu-avx-rr2-lut16-p3-x16.c |
     62  const uint64_t vidx0_lh = (uint64_t) _mm_extract_epi64(vidx0_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x16()
     64  const uint64_t vidx0_hh = (uint64_t) _mm_extract_epi64(vidx0_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x16()
     91  const uint64_t vidx1_lh = (uint64_t) _mm_extract_epi64(vidx1_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x16()
     93  const uint64_t vidx1_hh = (uint64_t) _mm_extract_epi64(vidx1_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x16()
    174  const uint64_t vidx_lh = (uint64_t) _mm_extract_epi64(vidx_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x16()
    176  const uint64_t vidx_hh = (uint64_t) _mm_extract_epi64(vidx_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x16()
    240  const uint64_t vidx_lh = (uint64_t) _mm_extract_epi64(vidx_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x16()
    242  const uint64_t vidx_hh = (uint64_t) _mm_extract_epi64(vidx_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x16()
|
D | velu-avx-rr2-lut16-p3-x8.c |
     59  const uint64_t vidx_lh = (uint64_t) _mm_extract_epi64(vidx_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x8()
     61  const uint64_t vidx_hh = (uint64_t) _mm_extract_epi64(vidx_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x8()
    125  const uint64_t vidx_lh = (uint64_t) _mm_extract_epi64(vidx_lo, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x8()
    127  const uint64_t vidx_hh = (uint64_t) _mm_extract_epi64(vidx_hi, 1);  in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x8()
|
D | velu-sse41-rr2-lut16-p3-x20.c |
     78  const uint64_t vidx23 = (uint64_t) _mm_extract_epi64(vidx0123, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
     85  const uint64_t vidx67 = (uint64_t) _mm_extract_epi64(vidx4567, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
     92  const uint64_t vidxAB = (uint64_t) _mm_extract_epi64(vidx89AB, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
     99  const uint64_t vidxEF = (uint64_t) _mm_extract_epi64(vidxCDEF, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
    106  const uint64_t vidxIJ = (uint64_t) _mm_extract_epi64(vidxGHIJ, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
    249  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
    292  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
|
D | velu-sse41-rr2-lut16-p3-x24.c |
     83  const uint64_t vidx23 = (uint64_t) _mm_extract_epi64(vidx0123, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
     90  const uint64_t vidx67 = (uint64_t) _mm_extract_epi64(vidx4567, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
     97  const uint64_t vidxAB = (uint64_t) _mm_extract_epi64(vidx89AB, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
    104  const uint64_t vidxEF = (uint64_t) _mm_extract_epi64(vidxCDEF, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
    111  const uint64_t vidxIJ = (uint64_t) _mm_extract_epi64(vidxGHIJ, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
    118  const uint64_t vidxMN = (uint64_t) _mm_extract_epi64(vidxKLMN, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
    283  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
    326  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
|
D | velu-sse41-rr2-lut16-p3-x16.c |
     73  const uint64_t vidx23 = (uint64_t) _mm_extract_epi64(vidx0123, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
     80  const uint64_t vidx67 = (uint64_t) _mm_extract_epi64(vidx4567, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
     87  const uint64_t vidxAB = (uint64_t) _mm_extract_epi64(vidx89AB, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
     94  const uint64_t vidxEF = (uint64_t) _mm_extract_epi64(vidxCDEF, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
    215  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
    258  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
|
D | velu-sse41-rr2-lut16-p3-x12.c |
     68  const uint64_t vidx23 = (uint64_t) _mm_extract_epi64(vidx0123, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x12()
     75  const uint64_t vidx67 = (uint64_t) _mm_extract_epi64(vidx4567, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x12()
     82  const uint64_t vidxAB = (uint64_t) _mm_extract_epi64(vidx89AB, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x12()
    181  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x12()
    224  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x12()
|
D | velu-sse41-rr2-lut16-p3-x8.c |
     63  const uint64_t vidx23 = (uint64_t) _mm_extract_epi64(vidx0123, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x8()
     70  const uint64_t vidx67 = (uint64_t) _mm_extract_epi64(vidx4567, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x8()
    147  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x8()
    190  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x8()
|
D | velu-sse41-rr2-lut16-p3-x4.c |
     57  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x4()
    100  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x4()
|
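Note: in the XNNPACK LUT kernels above (and the f32-sigmoid kernels below), each `_mm_extract_epi64(…, 1)` reads the upper 64-bit lane of a vector of packed lookup-table indices; the lower lane is read with `_mm_cvtsi128_si64`, which is why only the lane-1 extracts match this search. A minimal sketch of that pattern, assuming four packed 32-bit element indices and an illustrative `table`/`gather4` (our names, not XNNPACK's actual code):

#include <stdint.h>
#include <smmintrin.h>  /* SSE4.1: _mm_extract_epi64, _mm_insert_ps */

/* Illustrative sketch only: split four packed 32-bit LUT indices into
 * scalars (low pair via _mm_cvtsi128_si64, high pair via
 * _mm_extract_epi64(vidx, 1)) and gather the matching table entries. */
static inline __m128 gather4(const float* table, __m128i vidx) {
  const uint64_t vidx_lo = (uint64_t) _mm_cvtsi128_si64(vidx);     /* lanes 0-1 */
  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  /* lanes 2-3 */
  __m128 v = _mm_load_ss(&table[(uint32_t) vidx_lo]);
  v = _mm_insert_ps(v, _mm_load_ss(&table[vidx_lo >> 32]), 0x10);
  v = _mm_insert_ps(v, _mm_load_ss(&table[(uint32_t) vidx_hi]), 0x20);
  v = _mm_insert_ps(v, _mm_load_ss(&table[vidx_hi >> 32]), 0x30);
  return v;
}

`_mm_extract_epi64` compiles to the pextrq instruction and is only available with SSE4.1 on x86-64 targets, which is why these hits are confined to SSE4.1 and AVX kernel variants.
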
/external/XNNPACK/src/f32-sigmoid/gen/ |
D | sse41-lut64-p2-div-x24.c |
     77  const uint64_t vidx23 = (uint64_t) _mm_extract_epi64(vidx0123, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
     84  const uint64_t vidx67 = (uint64_t) _mm_extract_epi64(vidx4567, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
     91  const uint64_t vidxAB = (uint64_t) _mm_extract_epi64(vidx89AB, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
     98  const uint64_t vidxEF = (uint64_t) _mm_extract_epi64(vidxCDEF, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
    105  const uint64_t vidxIJ = (uint64_t) _mm_extract_epi64(vidxGHIJ, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
    112  const uint64_t vidxMN = (uint64_t) _mm_extract_epi64(vidxKLMN, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
    265  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
    307  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
|
D | sse41-lut64-p2-div-x16.c |
     67  const uint64_t vidx23 = (uint64_t) _mm_extract_epi64(vidx0123, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
     74  const uint64_t vidx67 = (uint64_t) _mm_extract_epi64(vidx4567, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
     81  const uint64_t vidxAB = (uint64_t) _mm_extract_epi64(vidx89AB, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
     88  const uint64_t vidxEF = (uint64_t) _mm_extract_epi64(vidxCDEF, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
    201  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
    243  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
|
D | sse41-lut64-p2-div-x20.c |
     72  const uint64_t vidx23 = (uint64_t) _mm_extract_epi64(vidx0123, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
     79  const uint64_t vidx67 = (uint64_t) _mm_extract_epi64(vidx4567, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
     86  const uint64_t vidxAB = (uint64_t) _mm_extract_epi64(vidx89AB, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
     93  const uint64_t vidxEF = (uint64_t) _mm_extract_epi64(vidxCDEF, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
    100  const uint64_t vidxIJ = (uint64_t) _mm_extract_epi64(vidxGHIJ, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
    233  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
    275  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
|
D | sse41-lut64-p2-div-x12.c |
     62  const uint64_t vidx23 = (uint64_t) _mm_extract_epi64(vidx0123, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
     69  const uint64_t vidx67 = (uint64_t) _mm_extract_epi64(vidx4567, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
     76  const uint64_t vidxAB = (uint64_t) _mm_extract_epi64(vidx89AB, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
    169  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
    211  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
|
D | sse41-lut64-p2-div-x8.c |
     57  const uint64_t vidx23 = (uint64_t) _mm_extract_epi64(vidx0123, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x8()
     64  const uint64_t vidx67 = (uint64_t) _mm_extract_epi64(vidx4567, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x8()
    137  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x8()
    179  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x8()
|
D | sse41-lut64-p2-div-x4.c |
     50  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x4()
     92  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);  in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x4()
|
/external/XNNPACK/src/math/ |
D | expm1minus-avx-rr2-lut16-p3.c |
     77  const uint64_t vidx_lh = (uint64_t) _mm_extract_epi64(vidx_lo, 1);  in xnn_math_f32_expm1minus__avx_rr2_lut16_p3()
     79  const uint64_t vidx_hh = (uint64_t) _mm_extract_epi64(vidx_hi, 1);  in xnn_math_f32_expm1minus__avx_rr2_lut16_p3()
|
D | sigmoid-avx-rr2-lut64-p2-div.c |
     81  const uint64_t vidx_lh = (uint64_t) _mm_extract_epi64(vidx_lo, 1);  in xnn_math_f32_sigmoid__avx_rr2_lut64_p2_div()
     83  const uint64_t vidx_hh = (uint64_t) _mm_extract_epi64(vidx_hi, 1);  in xnn_math_f32_sigmoid__avx_rr2_lut64_p2_div()
|
/external/llvm-project/clang/test/CodeGen/ |
D | ppc-smmintrin.c |
     17  _mm_extract_epi64(mi, 0);  in test_extract()
|
/external/XNNPACK/src/f32-velu/ |
D | avx-rr2-lut16-p3.c.in |
     64  const uint64_t vidx${N}_lh = (uint64_t) _mm_extract_epi64(vidx${N}_lo, 1);
     66  const uint64_t vidx${N}_hh = (uint64_t) _mm_extract_epi64(vidx${N}_hi, 1);
    141  const uint64_t vidx_lh = (uint64_t) _mm_extract_epi64(vidx_lo, 1);
    143  const uint64_t vidx_hh = (uint64_t) _mm_extract_epi64(vidx_hi, 1);
    207  const uint64_t vidx_lh = (uint64_t) _mm_extract_epi64(vidx_lo, 1);
    209  const uint64_t vidx_hh = (uint64_t) _mm_extract_epi64(vidx_hi, 1);
|
D | sse-rr2-lut16-p3.c.in |
     67  const uint64_t vidx${ABC[N+2:N+4]} = (uint64_t) _mm_extract_epi64(vidx${ABC[N:N+4]}, 1);
    159  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);
    226  const uint64_t vidx_hi = (uint64_t) _mm_extract_epi64(vidx, 1);
|
/external/libaom/libaom/av1/encoder/x86/ |
D | wedge_utils_avx2.c |
     76  csse = (uint64_t)_mm_extract_epi64(v_acc_q_0, 0);  in av1_wedge_sse_from_residuals_avx2()
    145  acc = (uint64_t)_mm_extract_epi64(v_acc_q_0, 0);  in av1_wedge_sign_from_residuals_avx2()
|
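Note: unlike the XNNPACK hits, these two libaom extracts read lane 0 of a 64-bit accumulator after a horizontal reduction; `_mm_extract_epi64(v, 0)` is equivalent to `_mm_cvtsi128_si64(v)`. A hedged sketch of the reduction that typically precedes such an extract, using our own names rather than libaom's:

#include <stdint.h>
#include <smmintrin.h>

/* Sketch only: fold the two 64-bit lanes of an accumulator together,
 * then read the result from the low lane with _mm_extract_epi64(..., 0). */
static inline uint64_t hsum_epi64(__m128i v_acc_q) {
  const __m128i v_hi  = _mm_unpackhi_epi64(v_acc_q, v_acc_q); /* high lane duplicated */
  const __m128i v_sum = _mm_add_epi64(v_acc_q, v_hi);         /* lane 0 = lane0 + lane1 */
  return (uint64_t) _mm_extract_epi64(v_sum, 0);
}
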
/external/llvm-project/clang/lib/Headers/ppc_wrappers/ |
D | smmintrin.h |
     51  _mm_extract_epi64(__m128i __X, const int __N) {  in _mm_extract_epi64() function
|
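Note: the ppc_wrappers header above is what lets calls like the one in ppc-smmintrin.c compile on PowerPC, where the x86 intrinsic does not exist natively. A generic sketch of what such a wrapper has to compute (an assumption about its shape, not the actual ppc_wrappers body):

#include <stdint.h>
#include <string.h>

/* Portable fallback sketch: view the 128-bit vector as two 64-bit lanes
 * and return lane n (0 = low, 1 = high). The real PowerPC wrapper works
 * on __vector types and may handle endianness differently. */
static inline long long extract_epi64_generic(const void* vec, const int n) {
  uint64_t lanes[2];
  memcpy(lanes, vec, sizeof lanes); /* reinterpret the vector's 128 bits */
  return (long long) lanes[n & 1];
}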