;
; jccolext.asm - colorspace conversion (AVX2)
;
; Copyright (C) 2015, Intel Corporation.
; Copyright (C) 2016, D. R. Commander.
;
; Based on the x86 SIMD extension for IJG JPEG library
; Copyright (C) 1999-2006, MIYASAKA Masaru.
; For conditions of distribution and use, see copyright notice in jsimdext.inc
;
; This file should be assembled with NASM (Netwide Assembler),
; can *not* be assembled with Microsoft's MASM or any compatible
; assembler (including Borland's Turbo Assembler).
; NASM is available from http://nasm.sourceforge.net/ or
; http://sourceforge.net/project/showfiles.php?group_id=6208
;
; [TAB8]
;
; NOTE(review): this file is included with RGB_PIXELSIZE pre-defined by the
; includer (3 = packed RGB, 4 = RGBX/XRGB-style); the sample-order macros and
; the PW_*/PD_* constants referenced below are expected to come from
; jcolsamp.inc / the including file -- confirm against the includer.

%include "jcolsamp.inc"

; --------------------------------------------------------------------------
;
; Convert some rows of samples to the output colorspace.
;
; GLOBAL(void)
; jsimd_rgb_ycc_convert_avx2(JDIMENSION img_width, JSAMPARRAY input_buf,
;                            JSAMPIMAGE output_buf, JDIMENSION output_row,
;                            int num_rows);
;
; i386 stack-based calling convention: after the prologue below, eax holds
; the entry-time esp (i.e. it points at the saved ebp, with the return
; address at eax+4), so the arguments live at eax+8 upward:

%define img_width(b)   (b) + 8          ; JDIMENSION img_width
%define input_buf(b)   (b) + 12         ; JSAMPARRAY input_buf
%define output_buf(b)  (b) + 16         ; JSAMPIMAGE output_buf
%define output_row(b)  (b) + 20         ; JDIMENSION output_row
%define num_rows(b)    (b) + 24         ; int num_rows

; Local frame layout (relative to the 32-byte-aligned ebp established in the
; prologue): WK_NUM ymmword scratch slots used to spill intermediate products,
; plus one pointer slot for the GOT base (PIC builds).
%define original_ebp   ebp + 0
%define wk(i)          ebp - (WK_NUM - (i)) * SIZEOF_YMMWORD
                                        ; ymmword wk[WK_NUM]
%define WK_NUM         8
%define gotptr         wk(0) - SIZEOF_POINTER  ; void * gotptr

    align       32
    GLOBAL_FUNCTION(jsimd_rgb_ycc_convert_avx2)

EXTN(jsimd_rgb_ycc_convert_avx2):
    ; Prologue: build a 32-byte-aligned frame so the wk[] spill slots can be
    ; accessed with aligned vmovdqa.  The caller's esp is kept in eax (and
    ; stored at [esp]) so the argument offsets above remain valid and the
    ; epilogue can restore the original stack.
    push        ebp
    mov         eax, esp                ; eax = original ebp
    sub         esp, byte 4
    and         esp, byte (-SIZEOF_YMMWORD)  ; align to 256 bits
    mov         [esp], eax
    mov         ebp, esp                ; ebp = aligned ebp
    lea         esp, [wk(0)]
    pushpic     eax                     ; make a room for GOT address
    push        ebx
;   push        ecx                     ; need not be preserved
;   push        edx                     ; need not be preserved
    push        esi
    push        edi

    get_GOT     ebx                     ; get GOT address
    movpic      POINTER [gotptr], ebx   ; save GOT address

    mov         ecx, JDIMENSION [img_width(eax)]
    test        ecx, ecx
    jz          near .return            ; zero-width image: nothing to do

    push        ecx                     ; preserve col count across pointer setup

    ; Resolve the three component output row pointers:
    ; output_buf[0..2][output_row] -> Y, Cb, Cr row arrays.
    mov         esi, JSAMPIMAGE [output_buf(eax)]
    mov         ecx, JDIMENSION [output_row(eax)]
    mov         edi, JSAMPARRAY [esi+0*SIZEOF_JSAMPARRAY]
    mov         ebx, JSAMPARRAY [esi+1*SIZEOF_JSAMPARRAY]
    mov         edx, JSAMPARRAY [esi+2*SIZEOF_JSAMPARRAY]
    lea         edi, [edi+ecx*SIZEOF_JSAMPROW]
    lea         ebx, [ebx+ecx*SIZEOF_JSAMPROW]
    lea         edx, [edx+ecx*SIZEOF_JSAMPROW]

    pop         ecx

    mov         esi, JSAMPARRAY [input_buf(eax)]
    mov         eax, INT [num_rows(eax)]
    test        eax, eax
    jle         near .return            ; num_rows <= 0: nothing to do
    alignx      16, 7

; Per-row loop.  Register roles inside the loop body (the row-pointer-array
; values are saved on the stack and restored at the bottom of the loop):
;   esi = inptr  (current RGB input row)
;   edi = outptr0 (Y row),  ebx = outptr1 (Cb row),  edx = outptr2 (Cr row)
;   ecx = remaining column (pixel) count
;   eax = GOT base, for GOTOFF() access to the PW_*/PD_* constants
.rowloop:
    pushpic     eax
    push        edx
    push        ebx
    push        edi
    push        esi
    push        ecx                     ; col

    mov         esi, JSAMPROW [esi]     ; inptr
    mov         edi, JSAMPROW [edi]     ; outptr0
    mov         ebx, JSAMPROW [ebx]     ; outptr1
    mov         edx, JSAMPROW [edx]     ; outptr2
    movpic      eax, POINTER [gotptr]   ; load GOT address (eax)

    cmp         ecx, byte SIZEOF_YMMWORD
    jae         near .columnloop        ; full 32-pixel groups: fast path
    alignx      16, 7

%if RGB_PIXELSIZE == 3  ; ---------------

; Partial-block loader for the final < 32 pixels of a row (3 bytes/pixel).
; The remaining ecx*3 bytes are gathered in power-of-two pieces (1, 2, 4, 8,
; 16, 32 bytes), each loaded from its exact offset, so no bytes beyond the
; end of the input row are ever read.  The pieces are shifted/OR-ed together
; so that .rgb_ycc_cnv sees the same ymmA/ymmF/ymmB layout as a full load.
.column_ld1:
    push        eax
    push        edx
    lea         ecx, [ecx+ecx*2]        ; imul ecx,RGB_PIXELSIZE
    test        cl, SIZEOF_BYTE
    jz          short .column_ld2
    sub         ecx, byte SIZEOF_BYTE
    movzx       eax, BYTE [esi+ecx]
.column_ld2:
    test        cl, SIZEOF_WORD
    jz          short .column_ld4
    sub         ecx, byte SIZEOF_WORD
    movzx       edx, WORD [esi+ecx]
    shl         eax, WORD_BIT
    or          eax, edx
.column_ld4:
    vmovd       xmmA, eax
    pop         edx
    pop         eax
    test        cl, SIZEOF_DWORD
    jz          short .column_ld8
    sub         ecx, byte SIZEOF_DWORD
    vmovd       xmmF, XMM_DWORD [esi+ecx]
    vpslldq     xmmA, xmmA, SIZEOF_DWORD
    vpor        xmmA, xmmA, xmmF
.column_ld8:
    test        cl, SIZEOF_MMWORD
    jz          short .column_ld16
    sub         ecx, byte SIZEOF_MMWORD
    vmovq       xmmB, XMM_MMWORD [esi+ecx]
    vpslldq     xmmA, xmmA, SIZEOF_MMWORD
    vpor        xmmA, xmmA, xmmB
.column_ld16:
    test        cl, SIZEOF_XMMWORD
    jz          short .column_ld32
    sub         ecx, byte SIZEOF_XMMWORD
    vmovdqu     xmmB, XMM_MMWORD [esi+ecx]
    vperm2i128  ymmA, ymmA, ymmA, 1    ; move gathered low lane to high lane
    vpor        ymmA, ymmB
.column_ld32:
    test        cl, SIZEOF_YMMWORD
    jz          short .column_ld64
    sub         ecx, byte SIZEOF_YMMWORD
    vmovdqa     ymmF, ymmA
    vmovdqu     ymmA, YMMWORD [esi+0*SIZEOF_YMMWORD]
.column_ld64:
    test        cl, 2*SIZEOF_YMMWORD
    mov         ecx, SIZEOF_YMMWORD    ; one final (padded) 32-pixel group
    jz          short .rgb_ycc_cnv
    vmovdqa     ymmB, ymmA
    vmovdqu     ymmA, YMMWORD [esi+0*SIZEOF_YMMWORD]
    vmovdqu     ymmF, YMMWORD [esi+1*SIZEOF_YMMWORD]
    jmp         short .rgb_ycc_cnv
    alignx      16, 7

; Full-block loader: 32 pixels = 96 bytes of packed RGB per iteration.
.columnloop:
    vmovdqu     ymmA, YMMWORD [esi+0*SIZEOF_YMMWORD]
    vmovdqu     ymmF, YMMWORD [esi+1*SIZEOF_YMMWORD]
    vmovdqu     ymmB, YMMWORD [esi+2*SIZEOF_YMMWORD]

; De-interleave 32 packed 3-byte pixels into planar 16-bit components.
; Notation in the lane maps below: digit = component (0/1/2 = R/G/B order
; per the includer's sample-order macros), letter = pixel index 0..V (base 32).
.rgb_ycc_cnv:
    ; ymmA=(00 10 20 01 11 21 02 12 22 03 13 23 04 14 24 05
    ;        15 25 06 16 26 07 17 27 08 18 28 09 19 29 0A 1A)
    ; ymmF=(2A 0B 1B 2B 0C 1C 2C 0D 1D 2D 0E 1E 2E 0F 1F 2F
    ;        0G 1G 2G 0H 1H 2H 0I 1I 2I 0J 1J 2J 0K 1K 2K 0L)
    ; ymmB=(1L 2L 0M 1M 2M 0N 1N 2N 0O 1O 2O 0P 1P 2P 0Q 1Q
    ;        2Q 0R 1R 2R 0S 1S 2S 0T 1T 2T 0U 1U 2U 0V 1V 2V)

    vmovdqu     ymmC, ymmA
    vinserti128 ymmA, ymmF, xmmA, 0     ; ymmA=(00 10 20 01 11 21 02 12 22 03 13 23 04 14 24 05
                                        ;        0G 1G 2G 0H 1H 2H 0I 1I 2I 0J 1J 2J 0K 1K 2K 0L)
    vinserti128 ymmC, ymmC, xmmB, 0     ; ymmC=(1L 2L 0M 1M 2M 0N 1N 2N 0O 1O 2O 0P 1P 2P 0Q 1Q
                                        ;        15 25 06 16 26 07 17 27 08 18 28 09 19 29 0A 1A)
    vinserti128 ymmB, ymmB, xmmF, 0     ; ymmB=(2A 0B 1B 2B 0C 1C 2C 0D 1D 2D 0E 1E 2E 0F 1F 2F
                                        ;        2Q 0R 1R 2R 0S 1S 2S 0T 1T 2T 0U 1U 2U 0V 1V 2V)
    vperm2i128  ymmF, ymmC, ymmC, 1     ; ymmF=(15 25 06 16 26 07 17 27 08 18 28 09 19 29 0A 1A
                                        ;        1L 2L 0M 1M 2M 0N 1N 2N 0O 1O 2O 0P 1P 2P 0Q 1Q)

    vmovdqa     ymmG, ymmA
    vpslldq     ymmA, ymmA, 8           ; ymmA=(-- -- -- -- -- -- -- -- 00 10 20 01 11 21 02 12
                                        ;        22 03 13 23 04 14 24 05 0G 1G 2G 0H 1H 2H 0I 1I)
    vpsrldq     ymmG, ymmG, 8           ; ymmG=(22 03 13 23 04 14 24 05 0G 1G 2G 0H 1H 2H 0I 1I
                                        ;        2I 0J 1J 2J 0K 1K 2K 0L -- -- -- -- -- -- -- --)

    vpunpckhbw  ymmA, ymmA, ymmF        ; ymmA=(00 08 10 18 20 28 01 09 11 19 21 29 02 0A 12 1A
                                        ;        0G 0O 1G 1O 2G 2O 0H 0P 1H 1P 2H 2P 0I 0Q 1I 1Q)
    vpslldq     ymmF, ymmF, 8           ; ymmF=(-- -- -- -- -- -- -- -- 15 25 06 16 26 07 17 27
                                        ;        08 18 28 09 19 29 0A 1A 1L 2L 0M 1M 2M 0N 1N 2N)

    vpunpcklbw  ymmG, ymmG, ymmB        ; ymmG=(22 2A 03 0B 13 1B 23 2B 04 0C 14 1C 24 2C 05 0D
                                        ;        2I 2Q 0J 0R 1J 1R 2J 2R 0K 0S 1K 1S 2K 2S 0L 0T)
    vpunpckhbw  ymmF, ymmF, ymmB        ; ymmF=(15 1D 25 2D 06 0E 16 1E 26 2E 07 0F 17 1F 27 2F
                                        ;        1L 1T 2L 2T 0M 0U 1M 1U 2M 2U 0N 0V 1N 1V 2N 2V)

    vmovdqa     ymmD, ymmA
    vpslldq     ymmA, ymmA, 8           ; ymmA=(-- -- -- -- -- -- -- -- 00 08 10 18 20 28 01 09
                                        ;        11 19 21 29 02 0A 12 1A 0G 0O 1G 1O 2G 2O 0H 0P)
    vpsrldq     ymmD, ymmD, 8           ; ymmD=(11 19 21 29 02 0A 12 1A 0G 0O 1G 1O 2G 2O 0H 0P
                                        ;        1H 1P 2H 2P 0I 0Q 1I 1Q -- -- -- -- -- -- -- --)

    vpunpckhbw  ymmA, ymmA, ymmG        ; ymmA=(00 04 08 0C 10 14 18 1C 20 24 28 2C 01 05 09 0D
                                        ;        0G 0K 0O 0S 1G 1K 1O 1S 2G 2K 2O 2S 0H 0L 0P 0T)
    vpslldq     ymmG, ymmG, 8           ; ymmG=(-- -- -- -- -- -- -- -- 22 2A 03 0B 13 1B 23 2B
                                        ;        04 0C 14 1C 24 2C 05 0D 2I 2Q 0J 0R 1J 1R 2J 2R)

    vpunpcklbw  ymmD, ymmD, ymmF        ; ymmD=(11 15 19 1D 21 25 29 2D 02 06 0A 0E 12 16 1A 1E
                                        ;        1H 1L 1P 1T 2H 2L 2P 2T 0I 0M 0Q 0U 1I 1M 1Q 1U)
    vpunpckhbw  ymmG, ymmG, ymmF        ; ymmG=(22 26 2A 2E 03 07 0B 0F 13 17 1B 1F 23 27 2B 2F
                                        ;        2I 2M 2Q 2U 0J 0N 0R 0V 1J 1N 1R 1V 2J 2N 2R 2V)

    vmovdqa     ymmE, ymmA
    vpslldq     ymmA, ymmA, 8           ; ymmA=(-- -- -- -- -- -- -- -- 00 04 08 0C 10 14 18 1C
                                        ;        20 24 28 2C 01 05 09 0D 0G 0K 0O 0S 1G 1K 1O 1S)
    vpsrldq     ymmE, ymmE, 8           ; ymmE=(20 24 28 2C 01 05 09 0D 0G 0K 0O 0S 1G 1K 1O 1S
                                        ;        2G 2K 2O 2S 0H 0L 0P 0T -- -- -- -- -- -- -- --)

    vpunpckhbw  ymmA, ymmA, ymmD        ; ymmA=(00 02 04 06 08 0A 0C 0E 10 12 14 16 18 1A 1C 1E
                                        ;        0G 0I 0K 0M 0O 0Q 0S 0U 1G 1I 1K 1M 1O 1Q 1S 1U)
    vpslldq     ymmD, ymmD, 8           ; ymmD=(-- -- -- -- -- -- -- -- 11 15 19 1D 21 25 29 2D
                                        ;        02 06 0A 0E 12 16 1A 1E 1H 1L 1P 1T 2H 2L 2P 2T)

    vpunpcklbw  ymmE, ymmE, ymmG        ; ymmE=(20 22 24 26 28 2A 2C 2E 01 03 05 07 09 0B 0D 0F
                                        ;        2G 2I 2K 2M 2O 2Q 2S 2U 0H 0J 0L 0N 0P 0R 0T 0V)
    vpunpckhbw  ymmD, ymmD, ymmG        ; ymmD=(11 13 15 17 19 1B 1D 1F 21 23 25 27 29 2B 2D 2F
                                        ;        1H 1J 1L 1N 1P 1R 1T 1V 2H 2J 2L 2N 2P 2R 2T 2V)

    ; Widen each byte component to 16 bits (zero-extend via unpack with 0).
    vpxor       ymmH, ymmH, ymmH

    vmovdqa     ymmC, ymmA
    vpunpcklbw  ymmA, ymmA, ymmH        ; ymmA=(00 02 04 06 08 0A 0C 0E 0G 0I 0K 0M 0O 0Q 0S 0U)
    vpunpckhbw  ymmC, ymmC, ymmH        ; ymmC=(10 12 14 16 18 1A 1C 1E 1G 1I 1K 1M 1O 1Q 1S 1U)

    vmovdqa     ymmB, ymmE
    vpunpcklbw  ymmE, ymmE, ymmH        ; ymmE=(20 22 24 26 28 2A 2C 2E 2G 2I 2K 2M 2O 2Q 2S 2U)
    vpunpckhbw  ymmB, ymmB, ymmH        ; ymmB=(01 03 05 07 09 0B 0D 0F 0H 0J 0L 0N 0P 0R 0T 0V)

    vmovdqa     ymmF, ymmD
    vpunpcklbw  ymmD, ymmD, ymmH        ; ymmD=(11 13 15 17 19 1B 1D 1F 1H 1J 1L 1N 1P 1R 1T 1V)
    vpunpckhbw  ymmF, ymmF, ymmH        ; ymmF=(21 23 25 27 29 2B 2D 2F 2H 2J 2L 2N 2P 2R 2T 2V)

%else  ; RGB_PIXELSIZE == 4 ; -----------

; Partial-block loader for the final < 32 pixels of a row (4 bytes/pixel).
; Same power-of-two gathering scheme as the 3-byte variant: only in-bounds
; bytes are read, and the result matches the full-load register layout.
.column_ld1:
    test        cl, SIZEOF_XMMWORD/16
    jz          short .column_ld2
    sub         ecx, byte SIZEOF_XMMWORD/16
    vmovd       xmmA, XMM_DWORD [esi+ecx*RGB_PIXELSIZE]
.column_ld2:
    test        cl, SIZEOF_XMMWORD/8
    jz          short .column_ld4
    sub         ecx, byte SIZEOF_XMMWORD/8
    vmovq       xmmF, XMM_MMWORD [esi+ecx*RGB_PIXELSIZE]
    vpslldq     xmmA, xmmA, SIZEOF_MMWORD
    vpor        xmmA, xmmA, xmmF
.column_ld4:
    test        cl, SIZEOF_XMMWORD/4
    jz          short .column_ld8
    sub         ecx, byte SIZEOF_XMMWORD/4
    vmovdqa     xmmF, xmmA
    vperm2i128  ymmF, ymmF, ymmF, 1     ; move gathered low lane to high lane
    vmovdqu     xmmA, XMMWORD [esi+ecx*RGB_PIXELSIZE]
    vpor        ymmA, ymmA, ymmF
.column_ld8:
    test        cl, SIZEOF_XMMWORD/2
    jz          short .column_ld16
    sub         ecx, byte SIZEOF_XMMWORD/2
    vmovdqa     ymmF, ymmA
    vmovdqu     ymmA, YMMWORD [esi+ecx*RGB_PIXELSIZE]
.column_ld16:
    test        cl, SIZEOF_XMMWORD
    mov         ecx, SIZEOF_YMMWORD     ; one final (padded) 32-pixel group
    jz          short .rgb_ycc_cnv
    vmovdqa     ymmE, ymmA
    vmovdqa     ymmH, ymmF
    vmovdqu     ymmA, YMMWORD [esi+0*SIZEOF_YMMWORD]
    vmovdqu     ymmF, YMMWORD [esi+1*SIZEOF_YMMWORD]
    jmp         short .rgb_ycc_cnv
    alignx      16, 7

; Full-block loader: 32 pixels = 128 bytes of packed RGBX per iteration.
.columnloop:
    vmovdqu     ymmA, YMMWORD [esi+0*SIZEOF_YMMWORD]
    vmovdqu     ymmF, YMMWORD [esi+1*SIZEOF_YMMWORD]
    vmovdqu     ymmE, YMMWORD [esi+2*SIZEOF_YMMWORD]
    vmovdqu     ymmH, YMMWORD [esi+3*SIZEOF_YMMWORD]

; De-interleave 32 packed 4-byte pixels into planar 16-bit components.
; Digit = component (3 = padding byte), letter = pixel index 0..V (base 32).
.rgb_ycc_cnv:
    ; ymmA=(00 10 20 30 01 11 21 31 02 12 22 32 03 13 23 33
    ;        04 14 24 34 05 15 25 35 06 16 26 36 07 17 27 37)
    ; ymmF=(08 18 28 38 09 19 29 39 0A 1A 2A 3A 0B 1B 2B 3B
    ;        0C 1C 2C 3C 0D 1D 2D 3D 0E 1E 2E 3E 0F 1F 2F 3F)
    ; ymmE=(0G 1G 2G 3G 0H 1H 2H 3H 0I 1I 2I 3I 0J 1J 2J 3J
    ;        0K 1K 2K 3K 0L 1L 2L 3L 0M 1M 2M 3M 0N 1N 2N 3N)
    ; ymmH=(0O 1O 2O 3O 0P 1P 2P 3P 0Q 1Q 2Q 3Q 0R 1R 2R 3R
    ;        0S 1S 2S 3S 0T 1T 2T 3T 0U 1U 2U 3U 0V 1V 2V 3V)

    vmovdqa     ymmB, ymmA
    vinserti128 ymmA, ymmA, xmmE, 1     ; ymmA=(00 10 20 30 01 11 21 31 02 12 22 32 03 13 23 33
                                        ;        0G 1G 2G 3G 0H 1H 2H 3H 0I 1I 2I 3I 0J 1J 2J 3J)
    vperm2i128  ymmE, ymmB, ymmE, 0x31  ; ymmE=(04 14 24 34 05 15 25 35 06 16 26 36 07 17 27 37
                                        ;        0K 1K 2K 3K 0L 1L 2L 3L 0M 1M 2M 3M 0N 1N 2N 3N)

    vmovdqa     ymmB, ymmF
    vinserti128 ymmF, ymmF, xmmH, 1     ; ymmF=(08 18 28 38 09 19 29 39 0A 1A 2A 3A 0B 1B 2B 3B
                                        ;        0O 1O 2O 3O 0P 1P 2P 3P 0Q 1Q 2Q 3Q 0R 1R 2R 3R)
    vperm2i128  ymmH, ymmB, ymmH, 0x31  ; ymmH=(0C 1C 2C 3C 0D 1D 2D 3D 0E 1E 2E 3E 0F 1F 2F 3F
                                        ;        0S 1S 2S 3S 0T 1T 2T 3T 0U 1U 2U 3U 0V 1V 2V 3V)

    vmovdqa     ymmD, ymmA
    vpunpcklbw  ymmA, ymmA, ymmE        ; ymmA=(00 04 10 14 20 24 30 34 01 05 11 15 21 25 31 35
                                        ;        0G 0K 1G 1K 2G 2K 3G 3K 0H 0L 1H 1L 2H 2L 3H 3L)
    vpunpckhbw  ymmD, ymmD, ymmE        ; ymmD=(02 06 12 16 22 26 32 36 03 07 13 17 23 27 33 37
                                        ;        0I 0M 1I 1M 2I 2M 3I 3M 0J 0N 1J 1N 2J 2N 3J 3N)

    vmovdqa     ymmC, ymmF
    vpunpcklbw  ymmF, ymmF, ymmH        ; ymmF=(08 0C 18 1C 28 2C 38 3C 09 0D 19 1D 29 2D 39 3D
                                        ;        0O 0S 1O 1S 2O 2S 3O 3S 0P 0T 1P 1T 2P 2T 3P 3T)
    vpunpckhbw  ymmC, ymmC, ymmH        ; ymmC=(0A 0E 1A 1E 2A 2E 3A 3E 0B 0F 1B 1F 2B 2F 3B 3F
                                        ;        0Q 0U 1Q 1U 2Q 2U 3Q 3U 0R 0V 1R 1V 2R 2V 3R 3V)

    vmovdqa     ymmB, ymmA
    vpunpcklwd  ymmA, ymmA, ymmF        ; ymmA=(00 04 08 0C 10 14 18 1C 20 24 28 2C 30 34 38 3C
                                        ;        0G 0K 0O 0S 1G 1K 1O 1S 2G 2K 2O 2S 3G 3K 3O 3S)
    vpunpckhwd  ymmB, ymmB, ymmF        ; ymmB=(01 05 09 0D 11 15 19 1D 21 25 29 2D 31 35 39 3D
                                        ;        0H 0L 0P 0T 1H 1L 1P 1T 2H 2L 2P 2T 3H 3L 3P 3T)

    vmovdqa     ymmG, ymmD
    vpunpcklwd  ymmD, ymmD, ymmC        ; ymmD=(02 06 0A 0E 12 16 1A 1E 22 26 2A 2E 32 36 3A 3E
                                        ;        0I 0M 0Q 0U 1I 1M 1Q 1U 2I 2M 2Q 2U 3I 3M 3Q 3U)
    vpunpckhwd  ymmG, ymmG, ymmC        ; ymmG=(03 07 0B 0F 13 17 1B 1F 23 27 2B 2F 33 37 3B 3F
                                        ;        0J 0N 0R 0V 1J 1N 1R 1V 2J 2N 2R 2V 3J 3N 3R 3V)

    vmovdqa     ymmE, ymmA
    vpunpcklbw  ymmA, ymmA, ymmD        ; ymmA=(00 02 04 06 08 0A 0C 0E 10 12 14 16 18 1A 1C 1E
                                        ;        0G 0I 0K 0M 0O 0Q 0S 0U 1G 1I 1K 1M 1O 1Q 1S 1U)
    vpunpckhbw  ymmE, ymmE, ymmD        ; ymmE=(20 22 24 26 28 2A 2C 2E 30 32 34 36 38 3A 3C 3E
                                        ;        2G 2I 2K 2M 2O 2Q 2S 2U 3G 3I 3K 3M 3O 3Q 3S 3U)

    vmovdqa     ymmH, ymmB
    vpunpcklbw  ymmB, ymmB, ymmG        ; ymmB=(01 03 05 07 09 0B 0D 0F 11 13 15 17 19 1B 1D 1F
                                        ;        0H 0J 0L 0N 0P 0R 0T 0V 1H 1J 1L 1N 1P 1R 1T 1V)
    vpunpckhbw  ymmH, ymmH, ymmG        ; ymmH=(21 23 25 27 29 2B 2D 2F 31 33 35 37 39 3B 3D 3F
                                        ;        2H 2J 2L 2N 2P 2R 2T 2V 3H 3J 3L 3N 3P 3R 3T 3V)

    ; Widen each byte component to 16 bits (zero-extend via unpack with 0).
    vpxor       ymmF, ymmF, ymmF

    vmovdqa     ymmC, ymmA
    vpunpcklbw  ymmA, ymmA, ymmF        ; ymmA=(00 02 04 06 08 0A 0C 0E 0G 0I 0K 0M 0O 0Q 0S 0U)
    vpunpckhbw  ymmC, ymmC, ymmF        ; ymmC=(10 12 14 16 18 1A 1C 1E 1G 1I 1K 1M 1O 1Q 1S 1U)

    vmovdqa     ymmD, ymmB
    vpunpcklbw  ymmB, ymmB, ymmF        ; ymmB=(01 03 05 07 09 0B 0D 0F 0H 0J 0L 0N 0P 0R 0T 0V)
    vpunpckhbw  ymmD, ymmD, ymmF        ; ymmD=(11 13 15 17 19 1B 1D 1F 1H 1J 1L 1N 1P 1R 1T 1V)

    vmovdqa     ymmG, ymmE
    vpunpcklbw  ymmE, ymmE, ymmF        ; ymmE=(20 22 24 26 28 2A 2C 2E 2G 2I 2K 2M 2O 2Q 2S 2U)
    vpunpckhbw  ymmG, ymmG, ymmF        ; ymmG=(30 32 34 36 38 3A 3C 3E 3G 3I 3K 3M 3O 3Q 3S 3U)

    ; Extract the odd-pixel 2x/3x components from ymmH by unpacking and then
    ; shifting the unwanted low byte of each word away.
    vpunpcklbw  ymmF, ymmF, ymmH
    vpunpckhbw  ymmH, ymmH, ymmH
    vpsrlw      ymmF, ymmF, BYTE_BIT    ; ymmF=(21 23 25 27 29 2B 2D 2F 2H 2J 2L 2N 2P 2R 2T 2V)
    vpsrlw      ymmH, ymmH, BYTE_BIT    ; ymmH=(31 33 35 37 39 3B 3D 3F 3H 3J 3L 3N 3P 3R 3T 3V)

%endif  ; RGB_PIXELSIZE ; ---------------

    ; Planar layout at this point (even pixels E, odd pixels O):
    ; ymm0=R(02468ACEGIKMOQSU)=RE, ymm2=G(02468ACEGIKMOQSU)=GE, ymm4=B(02468ACEGIKMOQSU)=BE
    ; ymm1=R(13579BDFHJLNPRTV)=RO, ymm3=G(13579BDFHJLNPRTV)=GO, ymm5=B(13579BDFHJLNPRTV)=BO

    ; (Original)
    ; Y  =  0.29900 * R + 0.58700 * G + 0.11400 * B
    ; Cb = -0.16874 * R - 0.33126 * G + 0.50000 * B + CENTERJSAMPLE
    ; Cr =  0.50000 * R - 0.41869 * G - 0.08131 * B + CENTERJSAMPLE
    ;
    ; (This implementation)
    ; Y  =  0.29900 * R + 0.33700 * G + 0.11400 * B + 0.25000 * G
    ; Cb = -0.16874 * R - 0.33126 * G + 0.50000 * B + CENTERJSAMPLE
    ; Cr =  0.50000 * R - 0.41869 * G - 0.08131 * B + CENTERJSAMPLE
    ;
    ; The 0.500 coefficients are applied as a plain shift (vpsrld ..., 1
    ; below), which is why G is split into 0.337 + 0.250 for Y.

    ; Spill R and B planes: only 8 ymm registers are architecturally
    ; available on i386, so intermediates round-trip through wk[].
    vmovdqa     YMMWORD [wk(0)], ymm0   ; wk(0)=RE
    vmovdqa     YMMWORD [wk(1)], ymm1   ; wk(1)=RO
    vmovdqa     YMMWORD [wk(2)], ymm4   ; wk(2)=BE
    vmovdqa     YMMWORD [wk(3)], ymm5   ; wk(3)=BO

    vmovdqa     ymm6, ymm1
    vpunpcklwd  ymm1, ymm1, ymm3
    vpunpckhwd  ymm6, ymm6, ymm3
    vmovdqa     ymm7, ymm1
    vmovdqa     ymm4, ymm6
    vpmaddwd    ymm1, ymm1, [GOTOFF(eax,PW_F0299_F0337)]  ; ymm1=ROL*FIX(0.299)+GOL*FIX(0.337)
    vpmaddwd    ymm6, ymm6, [GOTOFF(eax,PW_F0299_F0337)]  ; ymm6=ROH*FIX(0.299)+GOH*FIX(0.337)
    vpmaddwd    ymm7, ymm7, [GOTOFF(eax,PW_MF016_MF033)]  ; ymm7=ROL*-FIX(0.168)+GOL*-FIX(0.331)
    vpmaddwd    ymm4, ymm4, [GOTOFF(eax,PW_MF016_MF033)]  ; ymm4=ROH*-FIX(0.168)+GOH*-FIX(0.331)

    vmovdqa     YMMWORD [wk(4)], ymm1   ; wk(4)=ROL*FIX(0.299)+GOL*FIX(0.337)
    vmovdqa     YMMWORD [wk(5)], ymm6   ; wk(5)=ROH*FIX(0.299)+GOH*FIX(0.337)

    vpxor       ymm1, ymm1, ymm1
    vpxor       ymm6, ymm6, ymm6
    vpunpcklwd  ymm1, ymm1, ymm5        ; ymm1=BOL
    vpunpckhwd  ymm6, ymm6, ymm5        ; ymm6=BOH
    vpsrld      ymm1, ymm1, 1           ; ymm1=BOL*FIX(0.500)
    vpsrld      ymm6, ymm6, 1           ; ymm6=BOH*FIX(0.500)

    ; NOTE(review): PD_ONEHALFM1_CJ appears to fold the fixed-point rounding
    ; constant together with the CENTERJSAMPLE bias -- defined by the
    ; includer; confirm there.
    vmovdqa     ymm5, [GOTOFF(eax,PD_ONEHALFM1_CJ)]  ; ymm5=[PD_ONEHALFM1_CJ]

    vpaddd      ymm7, ymm7, ymm1
    vpaddd      ymm4, ymm4, ymm6
    vpaddd      ymm7, ymm7, ymm5
    vpaddd      ymm4, ymm4, ymm5
    vpsrld      ymm7, ymm7, SCALEBITS   ; ymm7=CbOL
    vpsrld      ymm4, ymm4, SCALEBITS   ; ymm4=CbOH
    vpackssdw   ymm7, ymm7, ymm4        ; ymm7=CbO

    vmovdqa     ymm1, YMMWORD [wk(2)]   ; ymm1=BE

    vmovdqa     ymm6, ymm0
    vpunpcklwd  ymm0, ymm0, ymm2
    vpunpckhwd  ymm6, ymm6, ymm2
    vmovdqa     ymm5, ymm0
    vmovdqa     ymm4, ymm6
    vpmaddwd    ymm0, ymm0, [GOTOFF(eax,PW_F0299_F0337)]  ; ymm0=REL*FIX(0.299)+GEL*FIX(0.337)
    vpmaddwd    ymm6, ymm6, [GOTOFF(eax,PW_F0299_F0337)]  ; ymm6=REH*FIX(0.299)+GEH*FIX(0.337)
    vpmaddwd    ymm5, ymm5, [GOTOFF(eax,PW_MF016_MF033)]  ; ymm5=REL*-FIX(0.168)+GEL*-FIX(0.331)
    vpmaddwd    ymm4, ymm4, [GOTOFF(eax,PW_MF016_MF033)]  ; ymm4=REH*-FIX(0.168)+GEH*-FIX(0.331)

    vmovdqa     YMMWORD [wk(6)], ymm0   ; wk(6)=REL*FIX(0.299)+GEL*FIX(0.337)
    vmovdqa     YMMWORD [wk(7)], ymm6   ; wk(7)=REH*FIX(0.299)+GEH*FIX(0.337)

    vpxor       ymm0, ymm0, ymm0
    vpxor       ymm6, ymm6, ymm6
    vpunpcklwd  ymm0, ymm0, ymm1        ; ymm0=BEL
    vpunpckhwd  ymm6, ymm6, ymm1        ; ymm6=BEH
    vpsrld      ymm0, ymm0, 1           ; ymm0=BEL*FIX(0.500)
    vpsrld      ymm6, ymm6, 1           ; ymm6=BEH*FIX(0.500)

    vmovdqa     ymm1, [GOTOFF(eax,PD_ONEHALFM1_CJ)]  ; ymm1=[PD_ONEHALFM1_CJ]

    vpaddd      ymm5, ymm5, ymm0
    vpaddd      ymm4, ymm4, ymm6
    vpaddd      ymm5, ymm5, ymm1
    vpaddd      ymm4, ymm4, ymm1
    vpsrld      ymm5, ymm5, SCALEBITS   ; ymm5=CbEL
    vpsrld      ymm4, ymm4, SCALEBITS   ; ymm4=CbEH
    vpackssdw   ymm5, ymm5, ymm4        ; ymm5=CbE

    ; Re-interleave even/odd samples: odd bytes into the high byte of each
    ; word, even bytes in the low byte.
    vpsllw      ymm7, ymm7, BYTE_BIT
    vpor        ymm5, ymm5, ymm7        ; ymm5=Cb
    vmovdqu     YMMWORD [ebx], ymm5     ; Save Cb

    vmovdqa     ymm0, YMMWORD [wk(3)]   ; ymm0=BO
    vmovdqa     ymm6, YMMWORD [wk(2)]   ; ymm6=BE
    vmovdqa     ymm1, YMMWORD [wk(1)]   ; ymm1=RO

    vmovdqa     ymm4, ymm0
    vpunpcklwd  ymm0, ymm0, ymm3
    vpunpckhwd  ymm4, ymm4, ymm3
    vmovdqa     ymm7, ymm0
    vmovdqa     ymm5, ymm4
    vpmaddwd    ymm0, ymm0, [GOTOFF(eax,PW_F0114_F0250)]  ; ymm0=BOL*FIX(0.114)+GOL*FIX(0.250)
    vpmaddwd    ymm4, ymm4, [GOTOFF(eax,PW_F0114_F0250)]  ; ymm4=BOH*FIX(0.114)+GOH*FIX(0.250)
    vpmaddwd    ymm7, ymm7, [GOTOFF(eax,PW_MF008_MF041)]  ; ymm7=BOL*-FIX(0.081)+GOL*-FIX(0.418)
    vpmaddwd    ymm5, ymm5, [GOTOFF(eax,PW_MF008_MF041)]  ; ymm5=BOH*-FIX(0.081)+GOH*-FIX(0.418)

    vmovdqa     ymm3, [GOTOFF(eax,PD_ONEHALF)]  ; ymm3=[PD_ONEHALF]

    vpaddd      ymm0, ymm0, YMMWORD [wk(4)]
    vpaddd      ymm4, ymm4, YMMWORD [wk(5)]
    vpaddd      ymm0, ymm0, ymm3
    vpaddd      ymm4, ymm4, ymm3
    vpsrld      ymm0, ymm0, SCALEBITS   ; ymm0=YOL
    vpsrld      ymm4, ymm4, SCALEBITS   ; ymm4=YOH
    vpackssdw   ymm0, ymm0, ymm4        ; ymm0=YO

    vpxor       ymm3, ymm3, ymm3
    vpxor       ymm4, ymm4, ymm4
    vpunpcklwd  ymm3, ymm3, ymm1        ; ymm3=ROL
    vpunpckhwd  ymm4, ymm4, ymm1        ; ymm4=ROH
    vpsrld      ymm3, ymm3, 1           ; ymm3=ROL*FIX(0.500)
    vpsrld      ymm4, ymm4, 1           ; ymm4=ROH*FIX(0.500)

    vmovdqa     ymm1, [GOTOFF(eax,PD_ONEHALFM1_CJ)]  ; ymm1=[PD_ONEHALFM1_CJ]

    vpaddd      ymm7, ymm7, ymm3
    vpaddd      ymm5, ymm5, ymm4
    vpaddd      ymm7, ymm7, ymm1
    vpaddd      ymm5, ymm5, ymm1
    vpsrld      ymm7, ymm7, SCALEBITS   ; ymm7=CrOL
    vpsrld      ymm5, ymm5, SCALEBITS   ; ymm5=CrOH
    vpackssdw   ymm7, ymm7, ymm5        ; ymm7=CrO

    vmovdqa     ymm3, YMMWORD [wk(0)]   ; ymm3=RE

    vmovdqa     ymm4, ymm6
    vpunpcklwd  ymm6, ymm6, ymm2
    vpunpckhwd  ymm4, ymm4, ymm2
    vmovdqa     ymm1, ymm6
    vmovdqa     ymm5, ymm4
    vpmaddwd    ymm6, ymm6, [GOTOFF(eax,PW_F0114_F0250)]  ; ymm6=BEL*FIX(0.114)+GEL*FIX(0.250)
    vpmaddwd    ymm4, ymm4, [GOTOFF(eax,PW_F0114_F0250)]  ; ymm4=BEH*FIX(0.114)+GEH*FIX(0.250)
    vpmaddwd    ymm1, ymm1, [GOTOFF(eax,PW_MF008_MF041)]  ; ymm1=BEL*-FIX(0.081)+GEL*-FIX(0.418)
    vpmaddwd    ymm5, ymm5, [GOTOFF(eax,PW_MF008_MF041)]  ; ymm5=BEH*-FIX(0.081)+GEH*-FIX(0.418)

    vmovdqa     ymm2, [GOTOFF(eax,PD_ONEHALF)]  ; ymm2=[PD_ONEHALF]

    vpaddd      ymm6, ymm6, YMMWORD [wk(6)]
    vpaddd      ymm4, ymm4, YMMWORD [wk(7)]
    vpaddd      ymm6, ymm6, ymm2
    vpaddd      ymm4, ymm4, ymm2
    vpsrld      ymm6, ymm6, SCALEBITS   ; ymm6=YEL
    vpsrld      ymm4, ymm4, SCALEBITS   ; ymm4=YEH
    vpackssdw   ymm6, ymm6, ymm4        ; ymm6=YE

    vpsllw      ymm0, ymm0, BYTE_BIT
    vpor        ymm6, ymm6, ymm0        ; ymm6=Y
    vmovdqu     YMMWORD [edi], ymm6     ; Save Y

    vpxor       ymm2, ymm2, ymm2
    vpxor       ymm4, ymm4, ymm4
    vpunpcklwd  ymm2, ymm2, ymm3        ; ymm2=REL
    vpunpckhwd  ymm4, ymm4, ymm3        ; ymm4=REH
    vpsrld      ymm2, ymm2, 1           ; ymm2=REL*FIX(0.500)
    vpsrld      ymm4, ymm4, 1           ; ymm4=REH*FIX(0.500)

    vmovdqa     ymm0, [GOTOFF(eax,PD_ONEHALFM1_CJ)]  ; ymm0=[PD_ONEHALFM1_CJ]

    vpaddd      ymm1, ymm1, ymm2
    vpaddd      ymm5, ymm5, ymm4
    vpaddd      ymm1, ymm1, ymm0
    vpaddd      ymm5, ymm5, ymm0
    vpsrld      ymm1, ymm1, SCALEBITS   ; ymm1=CrEL
    vpsrld      ymm5, ymm5, SCALEBITS   ; ymm5=CrEH
    vpackssdw   ymm1, ymm1, ymm5        ; ymm1=CrE

    vpsllw      ymm7, ymm7, BYTE_BIT
    vpor        ymm1, ymm1, ymm7        ; ymm1=Cr
    vmovdqu     YMMWORD [edx], ymm1     ; Save Cr

    ; Advance pointers by one 32-pixel group; loop while a full group
    ; remains, then fall into the partial loader for any tail pixels.
    sub         ecx, byte SIZEOF_YMMWORD
    add         esi, RGB_PIXELSIZE*SIZEOF_YMMWORD  ; inptr
    add         edi, byte SIZEOF_YMMWORD           ; outptr0
    add         ebx, byte SIZEOF_YMMWORD           ; outptr1
    add         edx, byte SIZEOF_YMMWORD           ; outptr2
    cmp         ecx, byte SIZEOF_YMMWORD
    jae         near .columnloop
    test        ecx, ecx
    jnz         near .column_ld1

    ; Row done: restore the row-pointer-array cursors and step to next row.
    pop         ecx                     ; col
    pop         esi
    pop         edi
    pop         ebx
    pop         edx
    poppic      eax

    add         esi, byte SIZEOF_JSAMPROW  ; input_buf
    add         edi, byte SIZEOF_JSAMPROW
    add         ebx, byte SIZEOF_JSAMPROW
    add         edx, byte SIZEOF_JSAMPROW
    dec         eax                     ; num_rows
    jg          near .rowloop

.return:
    vzeroupper                          ; avoid AVX/SSE transition penalties in caller
    pop         edi
    pop         esi
;   pop         edx                     ; need not be preserved
;   pop         ecx                     ; need not be preserved
    pop         ebx
    mov         esp, ebp                ; esp <- aligned ebp
    pop         esp                     ; esp <- original ebp
    pop         ebp
    ret

; For some reason, the OS X linker does not honor the request to align the
; segment unless we do this.
    align       32