• Home
  • Raw
  • Download

Lines Matching refs:DCTSIZE

266 col0a = _mm_load_si32((__m32 *)&inptr[DCTSIZE * 1]); \
267 col1a = _mm_load_si32((__m32 *)&inptr[DCTSIZE * 2]); \
273 col0l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 0]); \
274 col1l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 1]); \
275 col2l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 2]); \
276 col3l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 3]); \
277 col4l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 4]); \
278 col5l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 5]); \
279 col6l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 6]); \
280 col7l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 7]); \
294 quant0l = _mm_load_si64((__m64 *)&quantptr[DCTSIZE * 0]); \
307 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 0], row0); \
308 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 0 + 4], row0); \
309 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 1], row1); \
310 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 1 + 4], row1); \
311 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 2], row2); \
312 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 2 + 4], row2); \
313 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 3], row3); \
314 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 3 + 4], row3); \
332 col0l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 0]); /* (00 10 20 30) */ \
333 col2l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 2]); /* (02 12 22 32) */ \
334 col4l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 4]); /* (04 14 24 34) */ \
335 col6l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 6]); /* (06 16 26 36) */ \
337 quant0l = _mm_load_si64((__m64 *)&quantptr[DCTSIZE * 0]); \
338 quant2l = _mm_load_si64((__m64 *)&quantptr[DCTSIZE * 2]); \
339 quant4l = _mm_load_si64((__m64 *)&quantptr[DCTSIZE * 4]); \
340 quant6l = _mm_load_si64((__m64 *)&quantptr[DCTSIZE * 6]); \
379 col1l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 1]); /* (01 11 21 31) */ \
380 col3l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 3]); /* (03 13 23 33) */ \
381 col5l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 5]); /* (05 15 25 35) */ \
382 col7l = _mm_load_si64((__m64 *)&inptr[DCTSIZE * 7]); /* (07 17 27 37) */ \
384 quant1l = _mm_load_si64((__m64 *)&quantptr[DCTSIZE * 1]); \
385 quant3l = _mm_load_si64((__m64 *)&quantptr[DCTSIZE * 3]); \
386 quant5l = _mm_load_si64((__m64 *)&quantptr[DCTSIZE * 5]); \
387 quant7l = _mm_load_si64((__m64 *)&quantptr[DCTSIZE * 7]); \
423 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 0], row0l); \
424 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 0 + 4], row0h); \
425 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 1], row1l); \
426 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 1 + 4], row1h); \
427 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 2], row2l); \
428 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 2 + 4], row2h); \
429 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 3], row3l); \
430 _mm_store_si64((__m64 *)&wsptr[DCTSIZE * 3 + 4], row3h); \
442 row0l = _mm_load_si64((__m64 *)&wsptr[DCTSIZE * 0]); /* (00 01 02 03) */ \
443 row1l = _mm_load_si64((__m64 *)&wsptr[DCTSIZE * 1]); /* (10 11 12 13) */ \
444 row2l = _mm_load_si64((__m64 *)&wsptr[DCTSIZE * 2]); /* (20 21 22 23) */ \
445 row3l = _mm_load_si64((__m64 *)&wsptr[DCTSIZE * 3]); /* (30 31 32 33) */ \
446 row4l = _mm_load_si64((__m64 *)&wsptr[DCTSIZE * 4]); /* (40 41 42 43) */ \
447 row5l = _mm_load_si64((__m64 *)&wsptr[DCTSIZE * 5]); /* (50 51 52 53) */ \
448 row6l = _mm_load_si64((__m64 *)&wsptr[DCTSIZE * 6]); /* (60 61 62 63) */ \
449 row7l = _mm_load_si64((__m64 *)&wsptr[DCTSIZE * 7]); /* (70 71 72 73) */ \
560 wsptr += DCTSIZE * 4; in jsimd_idct_islow_mmi()