/**************************************************************************
 *
 * Copyright 2009 Younes Manton.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include <assert.h>

#include "pipe/p_compiler.h"
#include "pipe/p_context.h"

#include "util/u_memory.h"
#include "util/u_draw.h"
#include "util/u_surface.h"
#include "util/u_upload_mgr.h"

#include "tgsi/tgsi_ureg.h"

#include "vl_csc.h"
#include "vl_types.h"

#include "vl_compositor_gfx.h"

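/* Vertex shader output slots. VPOS, COLOR and VTEX intentionally share
 * semantic index 0: each is declared with a different TGSI semantic name
 * (POSITION, COLOR, GENERIC), so indices only need to be unique per
 * semantic. VTOP and VBOTTOM use GENERIC indices 1 and 2.
 */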
enum VS_OUTPUT
{
   VS_O_VPOS = 0,
   VS_O_COLOR = 0,
   VS_O_VTEX = 0,
   VS_O_VTOP,
   VS_O_VBOTTOM,
};

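/* Pass-through vertex shader shared by all layers: copies position, texture
 * coordinate and per-vertex color to the rasterizer, and additionally
 * derives the per-field texture coordinates (o_vtop/o_vbottom) consumed by
 * the weave deinterlacing fragment shader.
 */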
void *
create_vert_shader(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src vpos, vtex, color;
   struct ureg_dst tmp;
   struct ureg_dst o_vpos, o_vtex, o_color;
   struct ureg_dst o_vtop, o_vbottom;

   shader = ureg_create(PIPE_SHADER_VERTEX);
   if (!shader)
      return NULL;

   vpos = ureg_DECL_vs_input(shader, 0);
   vtex = ureg_DECL_vs_input(shader, 1);
   color = ureg_DECL_vs_input(shader, 2);
   tmp = ureg_DECL_temporary(shader);
   o_vpos = ureg_DECL_output(shader, TGSI_SEMANTIC_POSITION, VS_O_VPOS);
   o_color = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, VS_O_COLOR);
   o_vtex = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTEX);
   o_vtop = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTOP);
   o_vbottom = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_VBOTTOM);

   /*
    * o_vpos = vpos
    * o_vtex = vtex
    * o_color = color
    */
   ureg_MOV(shader, o_vpos, vpos);
   ureg_MOV(shader, o_vtex, vtex);
   ureg_MOV(shader, o_color, color);

   /*
    * tmp.x = vtex.w / 2
    * tmp.y = vtex.w / 4
    *
    * o_vtop.x = vtex.x
    * o_vtop.y = vtex.y * tmp.x + 0.25f
    * o_vtop.z = vtex.y * tmp.y + 0.25f
    * o_vtop.w = 1 / tmp.x
    *
    * o_vbottom.x = vtex.x
    * o_vbottom.y = vtex.y * tmp.x - 0.25f
    * o_vbottom.z = vtex.y * tmp.y - 0.25f
    * o_vbottom.w = 1 / tmp.y
    */
   ureg_MUL(shader, ureg_writemask(tmp, TGSI_WRITEMASK_X),
            ureg_scalar(vtex, TGSI_SWIZZLE_W), ureg_imm1f(shader, 0.5f));
   ureg_MUL(shader, ureg_writemask(tmp, TGSI_WRITEMASK_Y),
            ureg_scalar(vtex, TGSI_SWIZZLE_W), ureg_imm1f(shader, 0.25f));

   ureg_MOV(shader, ureg_writemask(o_vtop, TGSI_WRITEMASK_X), vtex);
   ureg_MAD(shader, ureg_writemask(o_vtop, TGSI_WRITEMASK_Y), ureg_scalar(vtex, TGSI_SWIZZLE_Y),
            ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_X), ureg_imm1f(shader, 0.25f));
   ureg_MAD(shader, ureg_writemask(o_vtop, TGSI_WRITEMASK_Z), ureg_scalar(vtex, TGSI_SWIZZLE_Y),
            ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_Y), ureg_imm1f(shader, 0.25f));
   ureg_RCP(shader, ureg_writemask(o_vtop, TGSI_WRITEMASK_W),
            ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_X));

   ureg_MOV(shader, ureg_writemask(o_vbottom, TGSI_WRITEMASK_X), vtex);
   ureg_MAD(shader, ureg_writemask(o_vbottom, TGSI_WRITEMASK_Y), ureg_scalar(vtex, TGSI_SWIZZLE_Y),
            ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_X), ureg_imm1f(shader, -0.25f));
   ureg_MAD(shader, ureg_writemask(o_vbottom, TGSI_WRITEMASK_Z), ureg_scalar(vtex, TGSI_SWIZZLE_Y),
            ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_Y), ureg_imm1f(shader, -0.25f));
   ureg_RCP(shader, ureg_writemask(o_vbottom, TGSI_WRITEMASK_W),
            ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_Y));

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

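/* Weave deinterlacing: reconstructs a progressive pixel by fetching the
 * matching lines from the top and bottom fields (stored as layers 0 and 1
 * of 2D array textures) and blending them with a linear interpolation
 * factor derived from the fractional line position.
 */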
static void
create_frag_shader_weave(struct ureg_program *shader, struct ureg_dst fragment)
{
   struct ureg_src i_tc[2];
   struct ureg_src sampler[3];
   struct ureg_dst t_tc[2];
   struct ureg_dst t_texel[2];
   unsigned i, j;

   i_tc[0] = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTOP, TGSI_INTERPOLATE_LINEAR);
   i_tc[1] = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VBOTTOM, TGSI_INTERPOLATE_LINEAR);

   for (i = 0; i < 3; ++i) {
      sampler[i] = ureg_DECL_sampler(shader, i);
      ureg_DECL_sampler_view(shader, i, TGSI_TEXTURE_2D_ARRAY,
                             TGSI_RETURN_TYPE_FLOAT,
                             TGSI_RETURN_TYPE_FLOAT,
                             TGSI_RETURN_TYPE_FLOAT,
                             TGSI_RETURN_TYPE_FLOAT);
   }

   for (i = 0; i < 2; ++i) {
      t_tc[i] = ureg_DECL_temporary(shader);
      t_texel[i] = ureg_DECL_temporary(shader);
   }

   /* calculate the texture offsets
    * t_tc.x = i_tc.x
    * t_tc.y = (round(i_tc.y - 0.5) + 0.5) / height * 2
    */
   for (i = 0; i < 2; ++i) {
      ureg_MOV(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_X), i_tc[i]);
      ureg_ADD(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_YZ),
               i_tc[i], ureg_imm1f(shader, -0.5f));
      ureg_ROUND(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_YZ), ureg_src(t_tc[i]));
      ureg_MOV(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_W),
               ureg_imm1f(shader, i ? 1.0f : 0.0f));
      ureg_ADD(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_YZ),
               ureg_src(t_tc[i]), ureg_imm1f(shader, 0.5f));
      ureg_MUL(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_Y),
               ureg_src(t_tc[i]), ureg_scalar(i_tc[0], TGSI_SWIZZLE_W));
      ureg_MUL(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_Z),
               ureg_src(t_tc[i]), ureg_scalar(i_tc[1], TGSI_SWIZZLE_W));
   }

   /* fetch the texels
    * texel[0..1].x = tex(t_tc[0..1][0])
    * texel[0..1].y = tex(t_tc[0..1][1])
    * texel[0..1].z = tex(t_tc[0..1][2])
    */
   for (i = 0; i < 2; ++i)
      for (j = 0; j < 3; ++j) {
         struct ureg_src src = ureg_swizzle(ureg_src(t_tc[i]),
            TGSI_SWIZZLE_X, j ? TGSI_SWIZZLE_Z : TGSI_SWIZZLE_Y, TGSI_SWIZZLE_W, TGSI_SWIZZLE_W);

         ureg_TEX(shader, ureg_writemask(t_texel[i], TGSI_WRITEMASK_X << j),
                  TGSI_TEXTURE_2D_ARRAY, src, sampler[j]);
      }

   /* calculate linear interpolation factor
    * factor = |round(i_tc.y) - i_tc.y| * 2
    */
   ureg_ROUND(shader, ureg_writemask(t_tc[0], TGSI_WRITEMASK_YZ), i_tc[0]);
   ureg_ADD(shader, ureg_writemask(t_tc[0], TGSI_WRITEMASK_YZ),
            ureg_src(t_tc[0]), ureg_negate(i_tc[0]));
   ureg_MUL(shader, ureg_writemask(t_tc[0], TGSI_WRITEMASK_YZ),
            ureg_abs(ureg_src(t_tc[0])), ureg_imm1f(shader, 2.0f));
   ureg_LRP(shader, fragment, ureg_swizzle(ureg_src(t_tc[0]),
            TGSI_SWIZZLE_Y, TGSI_SWIZZLE_Z, TGSI_SWIZZLE_Z, TGSI_SWIZZLE_Z),
            ureg_src(t_texel[0]), ureg_src(t_texel[1]));

   for (i = 0; i < 2; ++i) {
      ureg_release_temporary(shader, t_texel[i]);
      ureg_release_temporary(shader, t_tc[i]);
   }
}

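/* Color space conversion: multiplies the sampled texel by the conversion
 * matrix stored in constants 0-2 and derives the output alpha from the
 * luma key bounds stored in constant 3 (alpha goes to 0 when the keyed
 * component falls inside the key range).
 */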
static void
create_frag_shader_csc(struct ureg_program *shader, struct ureg_dst texel,
                       struct ureg_dst fragment)
{
   struct ureg_src csc[3];
   struct ureg_src lumakey;
   struct ureg_dst temp[2];
   unsigned i;

   for (i = 0; i < 3; ++i)
      csc[i] = ureg_DECL_constant(shader, i);

   lumakey = ureg_DECL_constant(shader, 3);

   for (i = 0; i < 2; ++i)
      temp[i] = ureg_DECL_temporary(shader);

   ureg_MOV(shader, ureg_writemask(texel, TGSI_WRITEMASK_W),
            ureg_imm1f(shader, 1.0f));

   for (i = 0; i < 3; ++i)
      ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i],
               ureg_src(texel));

   ureg_MOV(shader, ureg_writemask(temp[0], TGSI_WRITEMASK_W),
            ureg_scalar(ureg_src(texel), TGSI_SWIZZLE_Z));
   ureg_SLE(shader, ureg_writemask(temp[1], TGSI_WRITEMASK_W),
            ureg_src(temp[0]), ureg_scalar(lumakey, TGSI_SWIZZLE_X));
   ureg_SGT(shader, ureg_writemask(temp[0], TGSI_WRITEMASK_W),
            ureg_src(temp[0]), ureg_scalar(lumakey, TGSI_SWIZZLE_Y));
   ureg_MAX(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W),
            ureg_src(temp[0]), ureg_src(temp[1]));

   for (i = 0; i < 2; ++i)
      ureg_release_temporary(shader, temp[i]);
}

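/* Samples the three planes of a video buffer (bound as 2D array textures)
 * at the interpolated texture coordinate and packs the results into
 * texel.xyz.
 */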
static void
create_frag_shader_yuv(struct ureg_program *shader, struct ureg_dst texel)
{
   struct ureg_src tc;
   struct ureg_src sampler[3];
   unsigned i;

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTEX, TGSI_INTERPOLATE_LINEAR);
   for (i = 0; i < 3; ++i) {
      sampler[i] = ureg_DECL_sampler(shader, i);
      ureg_DECL_sampler_view(shader, i, TGSI_TEXTURE_2D_ARRAY,
                             TGSI_RETURN_TYPE_FLOAT,
                             TGSI_RETURN_TYPE_FLOAT,
                             TGSI_RETURN_TYPE_FLOAT,
                             TGSI_RETURN_TYPE_FLOAT);
   }

   /*
    * texel.xyz = tex(tc, sampler[i])
    */
   for (i = 0; i < 3; ++i)
      ureg_TEX(shader, ureg_writemask(texel, TGSI_WRITEMASK_X << i), TGSI_TEXTURE_2D_ARRAY, tc, sampler[i]);
}

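/* Fragment shader for progressive video buffers: sample the YCbCr planes
 * and convert the result to RGB in a single pass.
 */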
void *
create_frag_shader_video_buffer(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_dst texel;
   struct ureg_dst fragment;

   shader = ureg_create(PIPE_SHADER_FRAGMENT);
   if (!shader)
      return NULL;

   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   create_frag_shader_yuv(shader, texel);
   create_frag_shader_csc(shader, texel, fragment);

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

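/* Fragment shader for interlaced video buffers: weave-deinterlace the two
 * fields, then convert the result to RGB.
 */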
void *
create_frag_shader_weave_rgb(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_dst texel, fragment;

   shader = ureg_create(PIPE_SHADER_FRAGMENT);
   if (!shader)
      return NULL;

   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   create_frag_shader_weave(shader, texel);
   create_frag_shader_csc(shader, texel, fragment);

   ureg_release_temporary(shader, texel);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

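/* Deinterlacing to a YCbCr target: 'w' selects weave versus plain
 * sampling, 'y' selects whether the luma plane (written to .x) or the
 * chroma planes (written to .xy) are produced.
 */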
void *
create_frag_shader_deint_yuv(struct vl_compositor *c, bool y, bool w)
{
   struct ureg_program *shader;
   struct ureg_dst texel, fragment;

   shader = ureg_create(PIPE_SHADER_FRAGMENT);
   if (!shader)
      return NULL;

   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   if (w)
      create_frag_shader_weave(shader, texel);
   else
      create_frag_shader_yuv(shader, texel);

   if (y)
      ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X), ureg_src(texel));
   else
      ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_XY),
               ureg_swizzle(ureg_src(texel), TGSI_SWIZZLE_Y,
                            TGSI_SWIZZLE_Z, TGSI_SWIZZLE_W, TGSI_SWIZZLE_W));

   ureg_release_temporary(shader, texel);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

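/* Fragment shader for paletted subpictures: sampler 0 provides the palette
 * index, sampler 1 is the 1D palette lookup; include_cc additionally runs
 * the palette entry through the CSC matrix.
 */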
void *
create_frag_shader_palette(struct vl_compositor *c, bool include_cc)
{
   struct ureg_program *shader;
   struct ureg_src csc[3];
   struct ureg_src tc;
   struct ureg_src sampler;
   struct ureg_src palette;
   struct ureg_dst texel;
   struct ureg_dst fragment;
   unsigned i;

   shader = ureg_create(PIPE_SHADER_FRAGMENT);
   if (!shader)
      return NULL;

   for (i = 0; include_cc && i < 3; ++i)
      csc[i] = ureg_DECL_constant(shader, i);

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTEX, TGSI_INTERPOLATE_LINEAR);
   sampler = ureg_DECL_sampler(shader, 0);
   ureg_DECL_sampler_view(shader, 0, TGSI_TEXTURE_2D,
                          TGSI_RETURN_TYPE_FLOAT,
                          TGSI_RETURN_TYPE_FLOAT,
                          TGSI_RETURN_TYPE_FLOAT,
                          TGSI_RETURN_TYPE_FLOAT);
   palette = ureg_DECL_sampler(shader, 1);
   ureg_DECL_sampler_view(shader, 1, TGSI_TEXTURE_1D,
                          TGSI_RETURN_TYPE_FLOAT,
                          TGSI_RETURN_TYPE_FLOAT,
                          TGSI_RETURN_TYPE_FLOAT,
                          TGSI_RETURN_TYPE_FLOAT);

   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * texel = tex(tc, sampler)
    * fragment.xyz = tex(texel, palette) * csc
    * fragment.a = texel.a
    */
   ureg_TEX(shader, texel, TGSI_TEXTURE_2D, tc, sampler);
   ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_src(texel));

   if (include_cc) {
      ureg_TEX(shader, texel, TGSI_TEXTURE_1D, ureg_src(texel), palette);
      for (i = 0; i < 3; ++i)
         ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));
   } else {
      ureg_TEX(shader, ureg_writemask(fragment, TGSI_WRITEMASK_XYZ),
               TGSI_TEXTURE_1D, ureg_src(texel), palette);
   }

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

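/* Fragment shader for RGBA layers: modulates the sampled texel with the
 * per-vertex color.
 */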
void *
create_frag_shader_rgba(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src tc, color, sampler;
   struct ureg_dst texel, fragment;

   shader = ureg_create(PIPE_SHADER_FRAGMENT);
   if (!shader)
      return NULL;

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTEX, TGSI_INTERPOLATE_LINEAR);
   color = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_COLOR, VS_O_COLOR, TGSI_INTERPOLATE_LINEAR);
   sampler = ureg_DECL_sampler(shader, 0);
   ureg_DECL_sampler_view(shader, 0, TGSI_TEXTURE_2D,
                          TGSI_RETURN_TYPE_FLOAT,
                          TGSI_RETURN_TYPE_FLOAT,
                          TGSI_RETURN_TYPE_FLOAT,
                          TGSI_RETURN_TYPE_FLOAT);
   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * fragment = tex(tc, sampler) * color
    */
   ureg_TEX(shader, texel, TGSI_TEXTURE_2D, tc, sampler);
   ureg_MUL(shader, fragment, ureg_src(texel), color);

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

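/* RGB to YCbCr conversion: 'y' selects whether the luma row (csc[0]) or
 * the two chroma rows (csc[1], csc[2]) of the conversion matrix are applied
 * to the sampled RGB texel.
 */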
void *
create_frag_shader_rgb_yuv(struct vl_compositor *c, bool y)
{
   struct ureg_program *shader;
   struct ureg_src tc, sampler;
   struct ureg_dst texel, fragment;

   struct ureg_src csc[3];
   unsigned i;

   shader = ureg_create(PIPE_SHADER_FRAGMENT);
   if (!shader)
      return NULL;

   for (i = 0; i < 3; ++i)
      csc[i] = ureg_DECL_constant(shader, i);

   sampler = ureg_DECL_sampler(shader, 0);
   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTEX, TGSI_INTERPOLATE_LINEAR);
   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   ureg_TEX(shader, texel, TGSI_TEXTURE_2D, tc, sampler);

   if (y) {
      ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X), csc[0], ureg_src(texel));
   } else {
      for (i = 0; i < 2; ++i)
         ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i + 1], ureg_src(texel));
   }

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

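/* Emits the four vertices of a layer quad into the vertex buffer, applying
 * the layer rotation to the destination corners.
 */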
static void
gen_rect_verts(struct vertex2f *vb, struct vl_compositor_layer *layer)
{
   struct vertex2f tl, tr, br, bl;

   assert(vb && layer);

   switch (layer->rotate) {
   default:
   case VL_COMPOSITOR_ROTATE_0:
      tl = layer->dst.tl;
      tr.x = layer->dst.br.x;
      tr.y = layer->dst.tl.y;
      br = layer->dst.br;
      bl.x = layer->dst.tl.x;
      bl.y = layer->dst.br.y;
      break;
   case VL_COMPOSITOR_ROTATE_90:
      tl.x = layer->dst.br.x;
      tl.y = layer->dst.tl.y;
      tr = layer->dst.br;
      br.x = layer->dst.tl.x;
      br.y = layer->dst.br.y;
      bl = layer->dst.tl;
      break;
   case VL_COMPOSITOR_ROTATE_180:
      tl = layer->dst.br;
      tr.x = layer->dst.tl.x;
      tr.y = layer->dst.br.y;
      br = layer->dst.tl;
      bl.x = layer->dst.br.x;
      bl.y = layer->dst.tl.y;
      break;
   case VL_COMPOSITOR_ROTATE_270:
      tl.x = layer->dst.tl.x;
      tl.y = layer->dst.br.y;
      tr = layer->dst.tl;
      br.x = layer->dst.br.x;
      br.y = layer->dst.tl.y;
      bl = layer->dst.br;
      break;
   }

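   /* Each vertex occupies five vertex2f slots: position, texture
    * coordinate, z/w, and the per-vertex color split across two slots
    * (xy and zw), i.e. 20 slots per quad.
    */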
   vb[ 0].x = tl.x;
   vb[ 0].y = tl.y;
   vb[ 1].x = layer->src.tl.x;
   vb[ 1].y = layer->src.tl.y;
   vb[ 2] = layer->zw;
   vb[ 3].x = layer->colors[0].x;
   vb[ 3].y = layer->colors[0].y;
   vb[ 4].x = layer->colors[0].z;
   vb[ 4].y = layer->colors[0].w;

   vb[ 5].x = tr.x;
   vb[ 5].y = tr.y;
   vb[ 6].x = layer->src.br.x;
   vb[ 6].y = layer->src.tl.y;
   vb[ 7] = layer->zw;
   vb[ 8].x = layer->colors[1].x;
   vb[ 8].y = layer->colors[1].y;
   vb[ 9].x = layer->colors[1].z;
   vb[ 9].y = layer->colors[1].w;

   vb[10].x = br.x;
   vb[10].y = br.y;
   vb[11].x = layer->src.br.x;
   vb[11].y = layer->src.br.y;
   vb[12] = layer->zw;
   vb[13].x = layer->colors[2].x;
   vb[13].y = layer->colors[2].y;
   vb[14].x = layer->colors[2].z;
   vb[14].y = layer->colors[2].w;

   vb[15].x = bl.x;
   vb[15].y = bl.y;
   vb[16].x = layer->src.tl.x;
   vb[16].y = layer->src.br.y;
   vb[17] = layer->zw;
   vb[18].x = layer->colors[3].x;
   vb[18].y = layer->colors[3].y;
   vb[19].x = layer->colors[3].z;
   vb[19].y = layer->colors[3].w;
}

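/* Returns the screen-space rectangle a layer will cover: rotates the
 * destination corners, applies the layer viewport transform and clips the
 * result against the compositor scissor.
 */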
static inline struct u_rect
calc_drawn_area(struct vl_compositor_state *s, struct vl_compositor_layer *layer)
{
   struct vertex2f tl, br;
   struct u_rect result;

   assert(s && layer);

   // rotate
   switch (layer->rotate) {
   default:
   case VL_COMPOSITOR_ROTATE_0:
      tl = layer->dst.tl;
      br = layer->dst.br;
      break;
   case VL_COMPOSITOR_ROTATE_90:
      tl.x = layer->dst.br.x;
      tl.y = layer->dst.tl.y;
      br.x = layer->dst.tl.x;
      br.y = layer->dst.br.y;
      break;
   case VL_COMPOSITOR_ROTATE_180:
      tl = layer->dst.br;
      br = layer->dst.tl;
      break;
   case VL_COMPOSITOR_ROTATE_270:
      tl.x = layer->dst.tl.x;
      tl.y = layer->dst.br.y;
      br.x = layer->dst.br.x;
      br.y = layer->dst.tl.y;
      break;
   }

   // scale
   result.x0 = tl.x * layer->viewport.scale[0] + layer->viewport.translate[0];
   result.y0 = tl.y * layer->viewport.scale[1] + layer->viewport.translate[1];
   result.x1 = br.x * layer->viewport.scale[0] + layer->viewport.translate[0];
   result.y1 = br.y * layer->viewport.scale[1] + layer->viewport.translate[1];

   // and clip
   result.x0 = MAX2(result.x0, s->scissor.minx);
   result.y0 = MAX2(result.y0, s->scissor.miny);
   result.x1 = MIN2(result.x1, s->scissor.maxx);
   result.y1 = MIN2(result.y1, s->scissor.maxy);
   return result;
}

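/* Uploads the quad vertices for all active layers; when a clearing layer
 * fully covers the dirty rectangle, the dirty rectangle is reset so the
 * explicit render-target clear can be skipped.
 */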
static void
gen_vertex_data(struct vl_compositor *c, struct vl_compositor_state *s, struct u_rect *dirty)
{
   struct vertex2f *vb;
   unsigned i;

   assert(c);

   /* Allocate new memory for vertices. */
   u_upload_alloc(c->pipe->stream_uploader, 0,
                  c->vertex_buf.stride * VL_COMPOSITOR_MAX_LAYERS * 4, /* size */
                  4, /* alignment */
                  &c->vertex_buf.buffer_offset, &c->vertex_buf.buffer.resource,
                  (void **)&vb);

   for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; i++) {
      if (s->used_layers & (1 << i)) {
         struct vl_compositor_layer *layer = &s->layers[i];
         gen_rect_verts(vb, layer);
         vb += 20;

         if (!layer->viewport_valid) {
            layer->viewport.scale[0] = c->fb_state.width;
            layer->viewport.scale[1] = c->fb_state.height;
            layer->viewport.translate[0] = 0;
            layer->viewport.translate[1] = 0;
         }

         if (dirty && layer->clearing) {
            struct u_rect drawn = calc_drawn_area(s, layer);
            if (dirty->x0 >= drawn.x0 &&
                dirty->y0 >= drawn.y0 &&
                dirty->x1 <= drawn.x1 &&
                dirty->y1 <= drawn.y1) {

               // We clear the dirty area anyway, no need for clear_render_target
               dirty->x0 = dirty->y0 = VL_COMPOSITOR_MAX_DIRTY;
               dirty->x1 = dirty->y1 = VL_COMPOSITOR_MIN_DIRTY;
            }
         }
      }
   }

   u_upload_unmap(c->pipe->stream_uploader);
}

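/* Draws each active layer as a quad with its own blend, viewport, fragment
 * shader and sampler state, extending the dirty rectangle to cover the
 * drawn area.
 */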
static void
draw_layers(struct vl_compositor *c, struct vl_compositor_state *s, struct u_rect *dirty)
{
   unsigned vb_index, i;

   assert(c);

   for (i = 0, vb_index = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      if (s->used_layers & (1 << i)) {
         struct vl_compositor_layer *layer = &s->layers[i];
         struct pipe_sampler_view **samplers = &layer->sampler_views[0];
         unsigned num_sampler_views = !samplers[1] ? 1 : !samplers[2] ? 2 : 3;
         void *blend = layer->blend ? layer->blend : i ? c->blend_add : c->blend_clear;

         c->pipe->bind_blend_state(c->pipe, blend);
         c->pipe->set_viewport_states(c->pipe, 0, 1, &layer->viewport);
         c->pipe->bind_fs_state(c->pipe, layer->fs);
         c->pipe->bind_sampler_states(c->pipe, PIPE_SHADER_FRAGMENT, 0,
                                      num_sampler_views, layer->samplers);
         c->pipe->set_sampler_views(c->pipe, PIPE_SHADER_FRAGMENT, 0,
                                    num_sampler_views, 0, false, samplers);

         util_draw_arrays(c->pipe, PIPE_PRIM_QUADS, vb_index * 4, 4);
         vb_index++;

         if (dirty) {
            // Remember the currently drawn area as dirty for the next draw command
            struct u_rect drawn = calc_drawn_area(s, layer);
            dirty->x0 = MIN2(drawn.x0, dirty->x0);
            dirty->y0 = MIN2(drawn.y0, dirty->y0);
            dirty->x1 = MAX2(drawn.x1, dirty->x1);
            dirty->y1 = MAX2(drawn.y1, dirty->y1);
         }
      }
   }
}

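/* Main graphics entry point: sets up the framebuffer, scissor and vertex
 * data, optionally clears the remaining dirty area of the target surface,
 * and then draws all active layers.
 */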
void
vl_compositor_gfx_render(struct vl_compositor_state *s,
                         struct vl_compositor *c,
                         struct pipe_surface *dst_surface,
                         struct u_rect *dirty_area,
                         bool clear_dirty)
{
   assert(c);
   assert(dst_surface);

   c->fb_state.width = dst_surface->width;
   c->fb_state.height = dst_surface->height;
   c->fb_state.cbufs[0] = dst_surface;

   if (!s->scissor_valid) {
      s->scissor.minx = 0;
      s->scissor.miny = 0;
      s->scissor.maxx = dst_surface->width;
      s->scissor.maxy = dst_surface->height;
   }
   c->pipe->set_scissor_states(c->pipe, 0, 1, &s->scissor);

   gen_vertex_data(c, s, dirty_area);

   if (clear_dirty && dirty_area &&
       (dirty_area->x0 < dirty_area->x1 || dirty_area->y0 < dirty_area->y1)) {

      c->pipe->clear_render_target(c->pipe, dst_surface, &s->clear_color,
                                   0, 0, dst_surface->width, dst_surface->height, false);
      dirty_area->x0 = dirty_area->y0 = VL_COMPOSITOR_MAX_DIRTY;
      dirty_area->x1 = dirty_area->y1 = VL_COMPOSITOR_MIN_DIRTY;
   }

   c->pipe->set_framebuffer_state(c->pipe, &c->fb_state);
   c->pipe->bind_vs_state(c->pipe, c->vs);
   c->pipe->set_vertex_buffers(c->pipe, 0, 1, 0, false, &c->vertex_buf);
   c->pipe->bind_vertex_elements_state(c->pipe, c->vertex_elems_state);
   pipe_set_constant_buffer(c->pipe, PIPE_SHADER_FRAGMENT, 0, s->shader_params);
   c->pipe->bind_rasterizer_state(c->pipe, c->rast);

   draw_layers(c, s, dirty_area);
}