1 /**************************************************************************
2  *
3  * Copyright 2009 Younes Manton.
4  * All Rights Reserved.
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a
7  * copy of this software and associated documentation files (the
8  * "Software"), to deal in the Software without restriction, including
9  * without limitation the rights to use, copy, modify, merge, publish,
10  * distribute, sub license, and/or sell copies of the Software, and to
11  * permit persons to whom the Software is furnished to do so, subject to
12  * the following conditions:
13  *
14  * The above copyright notice and this permission notice (including the
15  * next paragraph) shall be included in all copies or substantial portions
16  * of the Software.
17  *
18  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21  * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25  *
26  **************************************************************************/
27 
28 #include <assert.h>
29 
30 #include "pipe/p_compiler.h"
31 #include "pipe/p_context.h"
32 
33 #include "util/u_memory.h"
34 #include "util/u_draw.h"
35 #include "util/u_surface.h"
36 #include "util/u_upload_mgr.h"
37 #include "util/u_sampler.h"
38 
39 #include "tgsi/tgsi_ureg.h"
40 
41 #include "vl_csc.h"
42 #include "vl_types.h"
43 #include "vl_compositor.h"
44 
45 #define MIN_DIRTY (0)
46 #define MAX_DIRTY (1 << 15)
47 
48 enum VS_OUTPUT
49 {
50    VS_O_VPOS = 0,
51    VS_O_COLOR = 0,
52    VS_O_VTEX = 0,
53    VS_O_VTOP,
54    VS_O_VBOTTOM,
55 };
56 
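/* Build the pass-through vertex shader shared by all layers: position, texture
 * coordinates and per-vertex color are copied straight to the outputs, and two
 * extra generics (VS_O_VTOP / VS_O_VBOTTOM) are derived for the weave
 * deinterlacer. vtex.w carries the buffer height (see calc_src_and_dst), so
 * tmp.x and tmp.y hold the lines per field for luma and chroma, and the .w
 * outputs carry their reciprocals for use in the fragment shader.
 */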
57 static void *
58 create_vert_shader(struct vl_compositor *c)
59 {
60    struct ureg_program *shader;
61    struct ureg_src vpos, vtex, color;
62    struct ureg_dst tmp;
63    struct ureg_dst o_vpos, o_vtex, o_color;
64    struct ureg_dst o_vtop, o_vbottom;
65 
66    shader = ureg_create(PIPE_SHADER_VERTEX);
67    if (!shader)
68       return false;
69 
70    vpos = ureg_DECL_vs_input(shader, 0);
71    vtex = ureg_DECL_vs_input(shader, 1);
72    color = ureg_DECL_vs_input(shader, 2);
73    tmp = ureg_DECL_temporary(shader);
74    o_vpos = ureg_DECL_output(shader, TGSI_SEMANTIC_POSITION, VS_O_VPOS);
75    o_color = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, VS_O_COLOR);
76    o_vtex = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTEX);
77    o_vtop = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTOP);
78    o_vbottom = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_VBOTTOM);
79 
80    /*
81     * o_vpos = vpos
82     * o_vtex = vtex
83     * o_color = color
84     */
85    ureg_MOV(shader, o_vpos, vpos);
86    ureg_MOV(shader, o_vtex, vtex);
87    ureg_MOV(shader, o_color, color);
88 
89    /*
90     * tmp.x = vtex.w / 2
91     * tmp.y = vtex.w / 4
92     *
93     * o_vtop.x = vtex.x
94     * o_vtop.y = vtex.y * tmp.x + 0.25f
95     * o_vtop.z = vtex.y * tmp.y + 0.25f
96     * o_vtop.w = 1 / tmp.x
97     *
98     * o_vbottom.x = vtex.x
99     * o_vbottom.y = vtex.y * tmp.x - 0.25f
100     * o_vbottom.z = vtex.y * tmp.y - 0.25f
101     * o_vbottom.w = 1 / tmp.y
102     */
103    ureg_MUL(shader, ureg_writemask(tmp, TGSI_WRITEMASK_X),
104             ureg_scalar(vtex, TGSI_SWIZZLE_W), ureg_imm1f(shader, 0.5f));
105    ureg_MUL(shader, ureg_writemask(tmp, TGSI_WRITEMASK_Y),
106             ureg_scalar(vtex, TGSI_SWIZZLE_W), ureg_imm1f(shader, 0.25f));
107 
108    ureg_MOV(shader, ureg_writemask(o_vtop, TGSI_WRITEMASK_X), vtex);
109    ureg_MAD(shader, ureg_writemask(o_vtop, TGSI_WRITEMASK_Y), ureg_scalar(vtex, TGSI_SWIZZLE_Y),
110             ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_X), ureg_imm1f(shader, 0.25f));
111    ureg_MAD(shader, ureg_writemask(o_vtop, TGSI_WRITEMASK_Z), ureg_scalar(vtex, TGSI_SWIZZLE_Y),
112             ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_Y), ureg_imm1f(shader, 0.25f));
113    ureg_RCP(shader, ureg_writemask(o_vtop, TGSI_WRITEMASK_W),
114             ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_X));
115 
116    ureg_MOV(shader, ureg_writemask(o_vbottom, TGSI_WRITEMASK_X), vtex);
117    ureg_MAD(shader, ureg_writemask(o_vbottom, TGSI_WRITEMASK_Y), ureg_scalar(vtex, TGSI_SWIZZLE_Y),
118             ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_X), ureg_imm1f(shader, -0.25f));
119    ureg_MAD(shader, ureg_writemask(o_vbottom, TGSI_WRITEMASK_Z), ureg_scalar(vtex, TGSI_SWIZZLE_Y),
120             ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_Y), ureg_imm1f(shader, -0.25f));
121    ureg_RCP(shader, ureg_writemask(o_vbottom, TGSI_WRITEMASK_W),
122             ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_Y));
123 
124    ureg_END(shader);
125 
126    return ureg_create_shader_and_destroy(shader, c->pipe);
127 }
128 
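/* Weave deinterlacing: i_tc[0]/i_tc[1] address the top and bottom field
 * (t_tc.w selects array layer 0 or 1), the y/z coordinates are snapped to the
 * center of the nearest luma/chroma field line, Y/Cb/Cr are fetched from the
 * three array textures, and the two field samples are blended with LRP using
 * twice the distance of the coordinate from the nearest line as the factor.
 */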
129 static void
130 create_frag_shader_weave(struct ureg_program *shader, struct ureg_dst fragment)
131 {
132    struct ureg_src i_tc[2];
133    struct ureg_src sampler[3];
134    struct ureg_dst t_tc[2];
135    struct ureg_dst t_texel[2];
136    unsigned i, j;
137 
138    i_tc[0] = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTOP, TGSI_INTERPOLATE_LINEAR);
139    i_tc[1] = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VBOTTOM, TGSI_INTERPOLATE_LINEAR);
140 
141    for (i = 0; i < 3; ++i) {
142       sampler[i] = ureg_DECL_sampler(shader, i);
143       ureg_DECL_sampler_view(shader, i, TGSI_TEXTURE_2D_ARRAY,
144                              TGSI_RETURN_TYPE_FLOAT,
145                              TGSI_RETURN_TYPE_FLOAT,
146                              TGSI_RETURN_TYPE_FLOAT,
147                              TGSI_RETURN_TYPE_FLOAT);
148    }
149 
150    for (i = 0; i < 2; ++i) {
151       t_tc[i] = ureg_DECL_temporary(shader);
152       t_texel[i] = ureg_DECL_temporary(shader);
153    }
154 
155    /* calculate the texture offsets
156     * t_tc.x = i_tc.x
157     * t_tc.y = (round(i_tc.y - 0.5) + 0.5) / height * 2
158     */
159    for (i = 0; i < 2; ++i) {
160       ureg_MOV(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_X), i_tc[i]);
161       ureg_ADD(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_YZ),
162                i_tc[i], ureg_imm1f(shader, -0.5f));
163       ureg_ROUND(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_YZ), ureg_src(t_tc[i]));
164       ureg_MOV(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_W),
165                ureg_imm1f(shader, i ? 1.0f : 0.0f));
166       ureg_ADD(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_YZ),
167                ureg_src(t_tc[i]), ureg_imm1f(shader, 0.5f));
168       ureg_MUL(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_Y),
169                ureg_src(t_tc[i]), ureg_scalar(i_tc[0], TGSI_SWIZZLE_W));
170       ureg_MUL(shader, ureg_writemask(t_tc[i], TGSI_WRITEMASK_Z),
171                ureg_src(t_tc[i]), ureg_scalar(i_tc[1], TGSI_SWIZZLE_W));
172    }
173 
174    /* fetch the texels
175     * texel[0..1].x = tex(t_tc[0..1][0])
176     * texel[0..1].y = tex(t_tc[0..1][1])
177     * texel[0..1].z = tex(t_tc[0..1][2])
178     */
179    for (i = 0; i < 2; ++i)
180       for (j = 0; j < 3; ++j) {
181          struct ureg_src src = ureg_swizzle(ureg_src(t_tc[i]),
182             TGSI_SWIZZLE_X, j ? TGSI_SWIZZLE_Z : TGSI_SWIZZLE_Y, TGSI_SWIZZLE_W, TGSI_SWIZZLE_W);
183 
184          ureg_TEX(shader, ureg_writemask(t_texel[i], TGSI_WRITEMASK_X << j),
185                   TGSI_TEXTURE_2D_ARRAY, src, sampler[j]);
186       }
187 
188    /* calculate linear interpolation factor
189     * factor = |round(i_tc.y) - i_tc.y| * 2
190     */
191    ureg_ROUND(shader, ureg_writemask(t_tc[0], TGSI_WRITEMASK_YZ), i_tc[0]);
192    ureg_ADD(shader, ureg_writemask(t_tc[0], TGSI_WRITEMASK_YZ),
193             ureg_src(t_tc[0]), ureg_negate(i_tc[0]));
194    ureg_MUL(shader, ureg_writemask(t_tc[0], TGSI_WRITEMASK_YZ),
195             ureg_abs(ureg_src(t_tc[0])), ureg_imm1f(shader, 2.0f));
196    ureg_LRP(shader, fragment, ureg_swizzle(ureg_src(t_tc[0]),
197             TGSI_SWIZZLE_Y, TGSI_SWIZZLE_Z, TGSI_SWIZZLE_Z, TGSI_SWIZZLE_Z),
198             ureg_src(t_texel[0]), ureg_src(t_texel[1]));
199 
200    for (i = 0; i < 2; ++i) {
201       ureg_release_temporary(shader, t_texel[i]);
202       ureg_release_temporary(shader, t_tc[i]);
203    }
204 }
205 
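/* Color-space conversion stage: constants 0..2 hold the rows of the CSC matrix
 * uploaded by vl_compositor_set_csc_matrix and are applied with three DP4s;
 * constant 3 holds the luma-key limits, and the output alpha is forced to zero
 * only when the keyed component falls between lumakey.x and lumakey.y.
 */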
206 static void
207 create_frag_shader_csc(struct ureg_program *shader, struct ureg_dst texel,
208 		       struct ureg_dst fragment)
209 {
210    struct ureg_src csc[3];
211    struct ureg_src lumakey;
212    struct ureg_dst temp[2];
213    unsigned i;
214 
215    for (i = 0; i < 3; ++i)
216       csc[i] = ureg_DECL_constant(shader, i);
217 
218    lumakey = ureg_DECL_constant(shader, 3);
219 
220    for (i = 0; i < 2; ++i)
221       temp[i] = ureg_DECL_temporary(shader);
222 
223    ureg_MOV(shader, ureg_writemask(texel, TGSI_WRITEMASK_W),
224 	    ureg_imm1f(shader, 1.0f));
225 
226    for (i = 0; i < 3; ++i)
227       ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i],
228 	       ureg_src(texel));
229 
230    ureg_MOV(shader, ureg_writemask(temp[0], TGSI_WRITEMASK_W),
231             ureg_scalar(ureg_src(texel), TGSI_SWIZZLE_Z));
232    ureg_SLE(shader, ureg_writemask(temp[1],TGSI_WRITEMASK_W),
233             ureg_src(temp[0]), ureg_scalar(lumakey, TGSI_SWIZZLE_X));
234    ureg_SGT(shader, ureg_writemask(temp[0],TGSI_WRITEMASK_W),
235             ureg_src(temp[0]), ureg_scalar(lumakey, TGSI_SWIZZLE_Y));
236    ureg_MAX(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W),
237             ureg_src(temp[0]), ureg_src(temp[1]));
238 
239    for (i = 0; i < 2; ++i)
240        ureg_release_temporary(shader, temp[i]);
241 }
242 
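/* Fetch one texel per component: the three sampler views provided by
 * get_sampler_view_components() are sampled at the interpolated VS_O_VTEX
 * coordinate and written to texel.x, .y and .z.
 */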
243 static void
244 create_frag_shader_yuv(struct ureg_program *shader, struct ureg_dst texel)
245 {
246    struct ureg_src tc;
247    struct ureg_src sampler[3];
248    unsigned i;
249 
250    tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTEX, TGSI_INTERPOLATE_LINEAR);
251    for (i = 0; i < 3; ++i) {
252       sampler[i] = ureg_DECL_sampler(shader, i);
253       ureg_DECL_sampler_view(shader, i, TGSI_TEXTURE_2D_ARRAY,
254                              TGSI_RETURN_TYPE_FLOAT,
255                              TGSI_RETURN_TYPE_FLOAT,
256                              TGSI_RETURN_TYPE_FLOAT,
257                              TGSI_RETURN_TYPE_FLOAT);
258    }
259 
260    /*
261     * texel.xyz = tex(tc, sampler[i])
262     */
263    for (i = 0; i < 3; ++i)
264       ureg_TEX(shader, ureg_writemask(texel, TGSI_WRITEMASK_X << i), TGSI_TEXTURE_2D_ARRAY, tc, sampler[i]);
265 }
266 
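/* Fragment shader for video buffer layers: per-component YCbCr fetch followed
 * by the CSC / luma-key stage above. Bob deinterlacing reuses it with shifted
 * texture coordinates (see vl_compositor_set_buffer_layer).
 */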
267 static void *
268 create_frag_shader_video_buffer(struct vl_compositor *c)
269 {
270    struct ureg_program *shader;
271    struct ureg_dst texel;
272    struct ureg_dst fragment;
273 
274    shader = ureg_create(PIPE_SHADER_FRAGMENT);
275    if (!shader)
276       return false;
277 
278    texel = ureg_DECL_temporary(shader);
279    fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);
280 
281    create_frag_shader_yuv(shader, texel);
282    create_frag_shader_csc(shader, texel, fragment);
283 
284    ureg_release_temporary(shader, texel);
285    ureg_END(shader);
286 
287    return ureg_create_shader_and_destroy(shader, c->pipe);
288 }
289 
290 static void *
291 create_frag_shader_weave_rgb(struct vl_compositor *c)
292 {
293    struct ureg_program *shader;
294    struct ureg_dst texel, fragment;
295 
296    shader = ureg_create(PIPE_SHADER_FRAGMENT);
297    if (!shader)
298       return false;
299 
300    texel = ureg_DECL_temporary(shader);
301    fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);
302 
303    create_frag_shader_weave(shader, texel);
304    create_frag_shader_csc(shader, texel, fragment);
305 
306    ureg_release_temporary(shader, texel);
307 
308    ureg_END(shader);
309 
310    return ureg_create_shader_and_destroy(shader, c->pipe);
311 }
312 
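/* Deinterlace to YCbCr without color conversion (used via set_yuv_layer from
 * vl_compositor_yuv_deint_full): 'w' selects weave vs. bob sampling and 'y'
 * selects whether the luma plane (fragment.x) or the chroma planes
 * (fragment.xy) are written.
 */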
313 static void *
314 create_frag_shader_deint_yuv(struct vl_compositor *c, bool y, bool w)
315 {
316    struct ureg_program *shader;
317    struct ureg_dst texel, fragment;
318 
319    shader = ureg_create(PIPE_SHADER_FRAGMENT);
320    if (!shader)
321       return false;
322 
323    texel = ureg_DECL_temporary(shader);
324    fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);
325 
326    if (w)
327       create_frag_shader_weave(shader, texel);
328    else
329       create_frag_shader_yuv(shader, texel);
330 
331    if (y)
332       ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X), ureg_src(texel));
333    else
334       ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_XY),
335                        ureg_swizzle(ureg_src(texel), TGSI_SWIZZLE_Y,
336                                TGSI_SWIZZLE_Z, TGSI_SWIZZLE_W, TGSI_SWIZZLE_W));
337 
338    ureg_release_temporary(shader, texel);
339 
340    ureg_END(shader);
341 
342    return ureg_create_shader_and_destroy(shader, c->pipe);
343 }
344 
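/* Palettized subpicture shader: sampler 0 provides the index texture (whose
 * alpha is passed through), sampler 1 the 1D palette; with include_cc the
 * palette entry is additionally run through the CSC matrix, otherwise it is
 * written out directly.
 */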
345 static void *
346 create_frag_shader_palette(struct vl_compositor *c, bool include_cc)
347 {
348    struct ureg_program *shader;
349    struct ureg_src csc[3];
350    struct ureg_src tc;
351    struct ureg_src sampler;
352    struct ureg_src palette;
353    struct ureg_dst texel;
354    struct ureg_dst fragment;
355    unsigned i;
356 
357    shader = ureg_create(PIPE_SHADER_FRAGMENT);
358    if (!shader)
359       return false;
360 
361    for (i = 0; include_cc && i < 3; ++i)
362       csc[i] = ureg_DECL_constant(shader, i);
363 
364    tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTEX, TGSI_INTERPOLATE_LINEAR);
365    sampler = ureg_DECL_sampler(shader, 0);
366    ureg_DECL_sampler_view(shader, 0, TGSI_TEXTURE_2D,
367                           TGSI_RETURN_TYPE_FLOAT,
368                           TGSI_RETURN_TYPE_FLOAT,
369                           TGSI_RETURN_TYPE_FLOAT,
370                           TGSI_RETURN_TYPE_FLOAT);
371    palette = ureg_DECL_sampler(shader, 1);
372    ureg_DECL_sampler_view(shader, 1, TGSI_TEXTURE_1D,
373                           TGSI_RETURN_TYPE_FLOAT,
374                           TGSI_RETURN_TYPE_FLOAT,
375                           TGSI_RETURN_TYPE_FLOAT,
376                           TGSI_RETURN_TYPE_FLOAT);
377 
378    texel = ureg_DECL_temporary(shader);
379    fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);
380 
381    /*
382     * texel = tex(tc, sampler)
383     * fragment.xyz = tex(texel, palette) * csc
384     * fragment.a = texel.a
385     */
386    ureg_TEX(shader, texel, TGSI_TEXTURE_2D, tc, sampler);
387    ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_src(texel));
388 
389    if (include_cc) {
390       ureg_TEX(shader, texel, TGSI_TEXTURE_1D, ureg_src(texel), palette);
391       for (i = 0; i < 3; ++i)
392          ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));
393    } else {
394       ureg_TEX(shader, ureg_writemask(fragment, TGSI_WRITEMASK_XYZ),
395                TGSI_TEXTURE_1D, ureg_src(texel), palette);
396    }
397 
398    ureg_release_temporary(shader, texel);
399    ureg_END(shader);
400 
401    return ureg_create_shader_and_destroy(shader, c->pipe);
402 }
403 
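/* Plain RGBA layer: sample the texture and modulate it with the per-vertex
 * color supplied through vl_compositor_set_rgba_layer().
 */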
404 static void *
405 create_frag_shader_rgba(struct vl_compositor *c)
406 {
407    struct ureg_program *shader;
408    struct ureg_src tc, color, sampler;
409    struct ureg_dst texel, fragment;
410 
411    shader = ureg_create(PIPE_SHADER_FRAGMENT);
412    if (!shader)
413       return false;
414 
415    tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTEX, TGSI_INTERPOLATE_LINEAR);
416    color = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_COLOR, VS_O_COLOR, TGSI_INTERPOLATE_LINEAR);
417    sampler = ureg_DECL_sampler(shader, 0);
418    ureg_DECL_sampler_view(shader, 0, TGSI_TEXTURE_2D,
419                           TGSI_RETURN_TYPE_FLOAT,
420                           TGSI_RETURN_TYPE_FLOAT,
421                           TGSI_RETURN_TYPE_FLOAT,
422                           TGSI_RETURN_TYPE_FLOAT);
423    texel = ureg_DECL_temporary(shader);
424    fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);
425 
426    /*
427     * fragment = tex(tc, sampler) * color
428     */
429    ureg_TEX(shader, texel, TGSI_TEXTURE_2D, tc, sampler);
430    ureg_MUL(shader, fragment, ureg_src(texel), color);
431    ureg_END(shader);
432 
433    return ureg_create_shader_and_destroy(shader, c->pipe);
434 }
435 
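/* RGB to YCbCr conversion (used by vl_compositor_convert_rgb_to_yuv): with
 * 'y' set the first CSC row produces the luma plane, otherwise rows 1 and 2
 * produce the two chroma components.
 */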
436 static void *
437 create_frag_shader_rgb_yuv(struct vl_compositor *c, bool y)
438 {
439    struct ureg_program *shader;
440    struct ureg_src tc, sampler;
441    struct ureg_dst texel, fragment;
442 
443    struct ureg_src csc[3];
444    unsigned i;
445 
446    shader = ureg_create(PIPE_SHADER_FRAGMENT);
447    if (!shader)
448       return false;
449 
450    for (i = 0; i < 3; ++i)
451       csc[i] = ureg_DECL_constant(shader, i);
452 
453    sampler = ureg_DECL_sampler(shader, 0);
454    tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTEX, TGSI_INTERPOLATE_LINEAR);
455    texel = ureg_DECL_temporary(shader);
456    fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);
457 
458    ureg_TEX(shader, texel, TGSI_TEXTURE_2D, tc, sampler);
459 
460    if (y) {
461       ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X), csc[0], ureg_src(texel));
462    } else {
463       for (i = 0; i < 2; ++i)
464          ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i + 1], ureg_src(texel));
465    }
466 
467    ureg_release_temporary(shader, texel);
468    ureg_END(shader);
469 
470    return ureg_create_shader_and_destroy(shader, c->pipe);
471 }
472 
473 static bool
474 init_shaders(struct vl_compositor *c)
475 {
476    assert(c);
477 
478    c->vs = create_vert_shader(c);
479    if (!c->vs) {
480       debug_printf("Unable to create vertex shader.\n");
481       return false;
482    }
483 
484    c->fs_video_buffer = create_frag_shader_video_buffer(c);
485    if (!c->fs_video_buffer) {
486       debug_printf("Unable to create YCbCr-to-RGB fragment shader.\n");
487       return false;
488    }
489 
490    c->fs_weave_rgb = create_frag_shader_weave_rgb(c);
491    if (!c->fs_weave_rgb) {
492       debug_printf("Unable to create YCbCr-to-RGB weave fragment shader.\n");
493       return false;
494    }
495 
496    c->fs_yuv.weave.y = create_frag_shader_deint_yuv(c, true, true);
497    c->fs_yuv.weave.uv = create_frag_shader_deint_yuv(c, false, true);
498    c->fs_yuv.bob.y = create_frag_shader_deint_yuv(c, true, false);
499    c->fs_yuv.bob.uv = create_frag_shader_deint_yuv(c, false, false);
500    if (!c->fs_yuv.weave.y || !c->fs_yuv.weave.uv ||
501        !c->fs_yuv.bob.y || !c->fs_yuv.bob.uv) {
502       debug_printf("Unable to create YCbCr i-to-YCbCr p deint fragment shader.\n");
503       return false;
504    }
505 
506    c->fs_palette.yuv = create_frag_shader_palette(c, true);
507    if (!c->fs_palette.yuv) {
508       debug_printf("Unable to create YUV-Palette-to-RGB fragment shader.\n");
509       return false;
510    }
511 
512    c->fs_palette.rgb = create_frag_shader_palette(c, false);
513    if (!c->fs_palette.rgb) {
514       debug_printf("Unable to create RGB-Palette-to-RGB fragment shader.\n");
515       return false;
516    }
517 
518    c->fs_rgba = create_frag_shader_rgba(c);
519    if (!c->fs_rgba) {
520       debug_printf("Unable to create RGB-to-RGB fragment shader.\n");
521       return false;
522    }
523 
524    c->fs_rgb_yuv.y = create_frag_shader_rgb_yuv(c, true);
525    c->fs_rgb_yuv.uv = create_frag_shader_rgb_yuv(c, false);
526    if (!c->fs_rgb_yuv.y || !c->fs_rgb_yuv.uv) {
527       debug_printf("Unable to create RGB-to-YUV fragment shader.\n");
528       return false;
529    }
530 
531    return true;
532 }
533 
534 static void cleanup_shaders(struct vl_compositor *c)
535 {
536    assert(c);
537 
538    c->pipe->delete_vs_state(c->pipe, c->vs);
539    c->pipe->delete_fs_state(c->pipe, c->fs_video_buffer);
540    c->pipe->delete_fs_state(c->pipe, c->fs_weave_rgb);
541    c->pipe->delete_fs_state(c->pipe, c->fs_yuv.weave.y);
542    c->pipe->delete_fs_state(c->pipe, c->fs_yuv.weave.uv);
543    c->pipe->delete_fs_state(c->pipe, c->fs_yuv.bob.y);
544    c->pipe->delete_fs_state(c->pipe, c->fs_yuv.bob.uv);
545    c->pipe->delete_fs_state(c->pipe, c->fs_palette.yuv);
546    c->pipe->delete_fs_state(c->pipe, c->fs_palette.rgb);
547    c->pipe->delete_fs_state(c->pipe, c->fs_rgba);
548    c->pipe->delete_fs_state(c->pipe, c->fs_rgb_yuv.y);
549    c->pipe->delete_fs_state(c->pipe, c->fs_rgb_yuv.uv);
550 }
551 
552 static bool
553 init_pipe_state(struct vl_compositor *c)
554 {
555    struct pipe_rasterizer_state rast;
556    struct pipe_sampler_state sampler;
557    struct pipe_blend_state blend;
558    struct pipe_depth_stencil_alpha_state dsa;
559    unsigned i;
560 
561    assert(c);
562 
563    c->fb_state.nr_cbufs = 1;
564    c->fb_state.zsbuf = NULL;
565 
566    memset(&sampler, 0, sizeof(sampler));
567    sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
568    sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
569    sampler.wrap_r = PIPE_TEX_WRAP_REPEAT;
570    sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
571    sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
572    sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
573    sampler.compare_mode = PIPE_TEX_COMPARE_NONE;
574    sampler.compare_func = PIPE_FUNC_ALWAYS;
575    sampler.normalized_coords = 1;
576 
577    c->sampler_linear = c->pipe->create_sampler_state(c->pipe, &sampler);
578 
579    sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
580    sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
581    c->sampler_nearest = c->pipe->create_sampler_state(c->pipe, &sampler);
582 
583    memset(&blend, 0, sizeof blend);
584    blend.independent_blend_enable = 0;
585    blend.rt[0].blend_enable = 0;
586    blend.logicop_enable = 0;
587    blend.logicop_func = PIPE_LOGICOP_CLEAR;
588    blend.rt[0].colormask = PIPE_MASK_RGBA;
589    blend.dither = 0;
590    c->blend_clear = c->pipe->create_blend_state(c->pipe, &blend);
591 
592    blend.rt[0].blend_enable = 1;
593    blend.rt[0].rgb_func = PIPE_BLEND_ADD;
594    blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
595    blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
596    blend.rt[0].alpha_func = PIPE_BLEND_ADD;
597    blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
598    blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
599    c->blend_add = c->pipe->create_blend_state(c->pipe, &blend);
600 
601    memset(&rast, 0, sizeof rast);
602    rast.flatshade = 0;
603    rast.front_ccw = 1;
604    rast.cull_face = PIPE_FACE_NONE;
605    rast.fill_back = PIPE_POLYGON_MODE_FILL;
606    rast.fill_front = PIPE_POLYGON_MODE_FILL;
607    rast.scissor = 1;
608    rast.line_width = 1;
609    rast.point_size_per_vertex = 1;
610    rast.offset_units = 1;
611    rast.offset_scale = 1;
612    rast.half_pixel_center = 1;
613    rast.bottom_edge_rule = 1;
614    rast.depth_clip = 1;
615 
616    c->rast = c->pipe->create_rasterizer_state(c->pipe, &rast);
617 
618    memset(&dsa, 0, sizeof dsa);
619    dsa.depth.enabled = 0;
620    dsa.depth.writemask = 0;
621    dsa.depth.func = PIPE_FUNC_ALWAYS;
622    for (i = 0; i < 2; ++i) {
623       dsa.stencil[i].enabled = 0;
624       dsa.stencil[i].func = PIPE_FUNC_ALWAYS;
625       dsa.stencil[i].fail_op = PIPE_STENCIL_OP_KEEP;
626       dsa.stencil[i].zpass_op = PIPE_STENCIL_OP_KEEP;
627       dsa.stencil[i].zfail_op = PIPE_STENCIL_OP_KEEP;
628       dsa.stencil[i].valuemask = 0;
629       dsa.stencil[i].writemask = 0;
630    }
631    dsa.alpha.enabled = 0;
632    dsa.alpha.func = PIPE_FUNC_ALWAYS;
633    dsa.alpha.ref_value = 0;
634    c->dsa = c->pipe->create_depth_stencil_alpha_state(c->pipe, &dsa);
635    c->pipe->bind_depth_stencil_alpha_state(c->pipe, c->dsa);
636 
637    return true;
638 }
639 
640 static void cleanup_pipe_state(struct vl_compositor *c)
641 {
642    assert(c);
643 
644    /* Asserted in softpipe_delete_fs_state() for some reason */
645    c->pipe->bind_vs_state(c->pipe, NULL);
646    c->pipe->bind_fs_state(c->pipe, NULL);
647 
648    c->pipe->delete_depth_stencil_alpha_state(c->pipe, c->dsa);
649    c->pipe->delete_sampler_state(c->pipe, c->sampler_linear);
650    c->pipe->delete_sampler_state(c->pipe, c->sampler_nearest);
651    c->pipe->delete_blend_state(c->pipe, c->blend_clear);
652    c->pipe->delete_blend_state(c->pipe, c->blend_add);
653    c->pipe->delete_rasterizer_state(c->pipe, c->rast);
654 }
655 
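/* The vertex layout is one vertex2f position followed by two vertex4f
 * attributes (texcoord + zw, and color), i.e. five vertex2f per vertex, which
 * matches the 20 vertex2f written per quad in gen_rect_verts().
 */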
656 static bool
657 init_buffers(struct vl_compositor *c)
658 {
659    struct pipe_vertex_element vertex_elems[3];
660 
661    assert(c);
662 
663    /*
664     * Create our vertex buffer and vertex buffer elements
665     */
666    c->vertex_buf.stride = sizeof(struct vertex2f) + sizeof(struct vertex4f) * 2;
667    c->vertex_buf.buffer_offset = 0;
668    c->vertex_buf.buffer.resource = NULL;
669    c->vertex_buf.is_user_buffer = false;
670 
671    vertex_elems[0].src_offset = 0;
672    vertex_elems[0].instance_divisor = 0;
673    vertex_elems[0].vertex_buffer_index = 0;
674    vertex_elems[0].src_format = PIPE_FORMAT_R32G32_FLOAT;
675    vertex_elems[1].src_offset = sizeof(struct vertex2f);
676    vertex_elems[1].instance_divisor = 0;
677    vertex_elems[1].vertex_buffer_index = 0;
678    vertex_elems[1].src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
679    vertex_elems[2].src_offset = sizeof(struct vertex2f) + sizeof(struct vertex4f);
680    vertex_elems[2].instance_divisor = 0;
681    vertex_elems[2].vertex_buffer_index = 0;
682    vertex_elems[2].src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
683    c->vertex_elems_state = c->pipe->create_vertex_elements_state(c->pipe, 3, vertex_elems);
684 
685    return true;
686 }
687 
688 static void
689 cleanup_buffers(struct vl_compositor *c)
690 {
691    assert(c);
692 
693    c->pipe->delete_vertex_elements_state(c->pipe, c->vertex_elems_state);
694    pipe_resource_reference(&c->vertex_buf.buffer.resource, NULL);
695 }
696 
697 static inline struct u_rect
698 default_rect(struct vl_compositor_layer *layer)
699 {
700    struct pipe_resource *res = layer->sampler_views[0]->texture;
701    struct u_rect rect = { 0, res->width0, 0, res->height0 * res->array_size };
702    return rect;
703 }
704 
705 static inline struct vertex2f
706 calc_topleft(struct vertex2f size, struct u_rect rect)
707 {
708    struct vertex2f res = { rect.x0 / size.x, rect.y0 / size.y };
709    return res;
710 }
711 
712 static inline struct vertex2f
713 calc_bottomright(struct vertex2f size, struct u_rect rect)
714 {
715    struct vertex2f res = { rect.x1 / size.x, rect.y1 / size.y };
716    return res;
717 }
718 
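/* Normalize the source and destination rectangles to [0..1] coordinates and
 * stash the buffer height in zw.y; it reaches the vertex shader as vtex.w via
 * gen_rect_verts() and is also used for the half-line offsets below.
 */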
719 static inline void
720 calc_src_and_dst(struct vl_compositor_layer *layer, unsigned width, unsigned height,
721                  struct u_rect src, struct u_rect dst)
722 {
723    struct vertex2f size =  { width, height };
724 
725    layer->src.tl = calc_topleft(size, src);
726    layer->src.br = calc_bottomright(size, src);
727    layer->dst.tl = calc_topleft(size, dst);
728    layer->dst.br = calc_bottomright(size, dst);
729    layer->zw.x = 0.0f;
730    layer->zw.y = size.y;
731 }
732 
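/* Emit one quad for the layer in the layout declared by init_buffers(): for
 * each of the four corners a position, a texcoord+zw pair and a color are
 * written; only the destination corners are permuted by layer->rotate.
 */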
733 static void
734 gen_rect_verts(struct vertex2f *vb, struct vl_compositor_layer *layer)
735 {
736    struct vertex2f tl, tr, br, bl;
737 
738    assert(vb && layer);
739 
740    switch (layer->rotate) {
741    default:
742    case VL_COMPOSITOR_ROTATE_0:
743       tl = layer->dst.tl;
744       tr.x = layer->dst.br.x;
745       tr.y = layer->dst.tl.y;
746       br = layer->dst.br;
747       bl.x = layer->dst.tl.x;
748       bl.y = layer->dst.br.y;
749       break;
750    case VL_COMPOSITOR_ROTATE_90:
751       tl.x = layer->dst.br.x;
752       tl.y = layer->dst.tl.y;
753       tr = layer->dst.br;
754       br.x = layer->dst.tl.x;
755       br.y = layer->dst.br.y;
756       bl = layer->dst.tl;
757       break;
758    case VL_COMPOSITOR_ROTATE_180:
759       tl = layer->dst.br;
760       tr.x = layer->dst.tl.x;
761       tr.y = layer->dst.br.y;
762       br = layer->dst.tl;
763       bl.x = layer->dst.br.x;
764       bl.y = layer->dst.tl.y;
765       break;
766    case VL_COMPOSITOR_ROTATE_270:
767       tl.x = layer->dst.tl.x;
768       tl.y = layer->dst.br.y;
769       tr = layer->dst.tl;
770       br.x = layer->dst.br.x;
771       br.y = layer->dst.tl.y;
772       bl = layer->dst.br;
773       break;
774    }
775 
776    vb[ 0].x = tl.x;
777    vb[ 0].y = tl.y;
778    vb[ 1].x = layer->src.tl.x;
779    vb[ 1].y = layer->src.tl.y;
780    vb[ 2] = layer->zw;
781    vb[ 3].x = layer->colors[0].x;
782    vb[ 3].y = layer->colors[0].y;
783    vb[ 4].x = layer->colors[0].z;
784    vb[ 4].y = layer->colors[0].w;
785 
786    vb[ 5].x = tr.x;
787    vb[ 5].y = tr.y;
788    vb[ 6].x = layer->src.br.x;
789    vb[ 6].y = layer->src.tl.y;
790    vb[ 7] = layer->zw;
791    vb[ 8].x = layer->colors[1].x;
792    vb[ 8].y = layer->colors[1].y;
793    vb[ 9].x = layer->colors[1].z;
794    vb[ 9].y = layer->colors[1].w;
795 
796    vb[10].x = br.x;
797    vb[10].y = br.y;
798    vb[11].x = layer->src.br.x;
799    vb[11].y = layer->src.br.y;
800    vb[12] = layer->zw;
801    vb[13].x = layer->colors[2].x;
802    vb[13].y = layer->colors[2].y;
803    vb[14].x = layer->colors[2].z;
804    vb[14].y = layer->colors[2].w;
805 
806    vb[15].x = bl.x;
807    vb[15].y = bl.y;
808    vb[16].x = layer->src.tl.x;
809    vb[16].y = layer->src.br.y;
810    vb[17] = layer->zw;
811    vb[18].x = layer->colors[3].x;
812    vb[18].y = layer->colors[3].y;
813    vb[19].x = layer->colors[3].z;
814    vb[19].y = layer->colors[3].w;
815 }
816 
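/* Return the screen-space rectangle the layer actually covers: rotate the
 * destination corners, map them through the layer viewport and clip against
 * the state's scissor. Used for dirty-area tracking.
 */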
817 static inline struct u_rect
818 calc_drawn_area(struct vl_compositor_state *s, struct vl_compositor_layer *layer)
819 {
820    struct vertex2f tl, br;
821    struct u_rect result;
822 
823    assert(s && layer);
824 
825    // rotate
826    switch (layer->rotate) {
827    default:
828    case VL_COMPOSITOR_ROTATE_0:
829       tl = layer->dst.tl;
830       br = layer->dst.br;
831       break;
832    case VL_COMPOSITOR_ROTATE_90:
833       tl.x = layer->dst.br.x;
834       tl.y = layer->dst.tl.y;
835       br.x = layer->dst.tl.x;
836       br.y = layer->dst.br.y;
837       break;
838    case VL_COMPOSITOR_ROTATE_180:
839       tl = layer->dst.br;
840       br = layer->dst.tl;
841       break;
842    case VL_COMPOSITOR_ROTATE_270:
843       tl.x = layer->dst.tl.x;
844       tl.y = layer->dst.br.y;
845       br.x = layer->dst.br.x;
846       br.y = layer->dst.tl.y;
847       break;
848    }
849 
850    // scale
851    result.x0 = tl.x * layer->viewport.scale[0] + layer->viewport.translate[0];
852    result.y0 = tl.y * layer->viewport.scale[1] + layer->viewport.translate[1];
853    result.x1 = br.x * layer->viewport.scale[0] + layer->viewport.translate[0];
854    result.y1 = br.y * layer->viewport.scale[1] + layer->viewport.translate[1];
855 
856    // and clip
857    result.x0 = MAX2(result.x0, s->scissor.minx);
858    result.y0 = MAX2(result.y0, s->scissor.miny);
859    result.x1 = MIN2(result.x1, s->scissor.maxx);
860    result.y1 = MIN2(result.y1, s->scissor.maxy);
861    return result;
862 }
863 
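/* Upload vertices for all used layers with u_upload_mgr. If a clearing layer
 * completely covers the caller's dirty rectangle, the rectangle is reset here
 * so vl_compositor_render() can skip the explicit clear_render_target().
 */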
864 static void
865 gen_vertex_data(struct vl_compositor *c, struct vl_compositor_state *s, struct u_rect *dirty)
866 {
867    struct vertex2f *vb;
868    unsigned i;
869 
870    assert(c);
871 
872    /* Allocate new memory for vertices. */
873    u_upload_alloc(c->pipe->stream_uploader, 0,
874                   c->vertex_buf.stride * VL_COMPOSITOR_MAX_LAYERS * 4, /* size */
875                   4, /* alignment */
876                   &c->vertex_buf.buffer_offset, &c->vertex_buf.buffer.resource,
877                   (void**)&vb);
878 
879    for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; i++) {
880       if (s->used_layers & (1 << i)) {
881          struct vl_compositor_layer *layer = &s->layers[i];
882          gen_rect_verts(vb, layer);
883          vb += 20;
884 
885          if (!layer->viewport_valid) {
886             layer->viewport.scale[0] = c->fb_state.width;
887             layer->viewport.scale[1] = c->fb_state.height;
888             layer->viewport.translate[0] = 0;
889             layer->viewport.translate[1] = 0;
890          }
891 
892          if (dirty && layer->clearing) {
893             struct u_rect drawn = calc_drawn_area(s, layer);
894             if (
895              dirty->x0 >= drawn.x0 &&
896              dirty->y0 >= drawn.y0 &&
897              dirty->x1 <= drawn.x1 &&
898              dirty->y1 <= drawn.y1) {
899 
900                // We clear the dirty area anyway, no need for clear_render_target
901                dirty->x0 = dirty->y0 = MAX_DIRTY;
902                dirty->x1 = dirty->y1 = MIN_DIRTY;
903             }
904          }
905       }
906    }
907 
908    u_upload_unmap(c->pipe->stream_uploader);
909 }
910 
911 static void
912 draw_layers(struct vl_compositor *c, struct vl_compositor_state *s, struct u_rect *dirty)
913 {
914    unsigned vb_index, i;
915 
916    assert(c);
917 
918    for (i = 0, vb_index = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
919       if (s->used_layers & (1 << i)) {
920          struct vl_compositor_layer *layer = &s->layers[i];
921          struct pipe_sampler_view **samplers = &layer->sampler_views[0];
922          unsigned num_sampler_views = !samplers[1] ? 1 : !samplers[2] ? 2 : 3;
923          void *blend = layer->blend ? layer->blend : i ? c->blend_add : c->blend_clear;
924 
925          c->pipe->bind_blend_state(c->pipe, blend);
926          c->pipe->set_viewport_states(c->pipe, 0, 1, &layer->viewport);
927          c->pipe->bind_fs_state(c->pipe, layer->fs);
928          c->pipe->bind_sampler_states(c->pipe, PIPE_SHADER_FRAGMENT, 0,
929                                       num_sampler_views, layer->samplers);
930          c->pipe->set_sampler_views(c->pipe, PIPE_SHADER_FRAGMENT, 0,
931                                     num_sampler_views, samplers);
932 
933          util_draw_arrays(c->pipe, PIPE_PRIM_QUADS, vb_index * 4, 4);
934          vb_index++;
935 
936          if (dirty) {
937             // Remember the currently drawn area as dirty for the next draw command
938             struct u_rect drawn = calc_drawn_area(s, layer);
939             dirty->x0 = MIN2(drawn.x0, dirty->x0);
940             dirty->y0 = MIN2(drawn.y0, dirty->y0);
941             dirty->x1 = MAX2(drawn.x1, dirty->x1);
942             dirty->y1 = MAX2(drawn.y1, dirty->y1);
943          }
944       }
945    }
946 }
947 
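/* Internal helper for vl_compositor_yuv_deint_full(): bind the buffer's
 * per-component sampler views and pick the bob or weave YCbCr shader; for bob
 * the source rectangle is shifted by half a line and zw.x selects the field.
 */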
948 static void
949 set_yuv_layer(struct vl_compositor_state *s, struct vl_compositor *c,
950               unsigned layer, struct pipe_video_buffer *buffer,
951               struct u_rect *src_rect, struct u_rect *dst_rect,
952               bool y, enum vl_compositor_deinterlace deinterlace)
953 {
954    struct pipe_sampler_view **sampler_views;
955    float half_a_line;
956    unsigned i;
957 
958    assert(s && c && buffer);
959 
960    assert(layer < VL_COMPOSITOR_MAX_LAYERS);
961 
962    s->used_layers |= 1 << layer;
963    sampler_views = buffer->get_sampler_view_components(buffer);
964    for (i = 0; i < 3; ++i) {
965       s->layers[layer].samplers[i] = c->sampler_linear;
966       pipe_sampler_view_reference(&s->layers[layer].sampler_views[i], sampler_views[i]);
967    }
968 
969    calc_src_and_dst(&s->layers[layer], buffer->width, buffer->height,
970                     src_rect ? *src_rect : default_rect(&s->layers[layer]),
971                     dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
972 
973    half_a_line = 0.5f / s->layers[layer].zw.y;
974 
975    switch(deinterlace) {
976    case VL_COMPOSITOR_BOB_TOP:
977       s->layers[layer].zw.x = 0.0f;
978       s->layers[layer].src.tl.y += half_a_line;
979       s->layers[layer].src.br.y += half_a_line;
980       s->layers[layer].fs = (y) ? c->fs_yuv.bob.y : c->fs_yuv.bob.uv;
981       break;
982 
983    case VL_COMPOSITOR_BOB_BOTTOM:
984       s->layers[layer].zw.x = 1.0f;
985       s->layers[layer].src.tl.y -= half_a_line;
986       s->layers[layer].src.br.y -= half_a_line;
987       s->layers[layer].fs = (y) ? c->fs_yuv.bob.y : c->fs_yuv.bob.uv;
988       break;
989 
990    default:
991       s->layers[layer].fs = (y) ? c->fs_yuv.weave.y : c->fs_yuv.weave.uv;
992       break;
993    }
994 }
995 
996 static void
997 set_rgb_to_yuv_layer(struct vl_compositor_state *s, struct vl_compositor *c,
998                      unsigned layer, struct pipe_sampler_view *v,
999                      struct u_rect *src_rect, struct u_rect *dst_rect, bool y)
1000 {
1001    vl_csc_matrix csc_matrix;
1002 
1003    assert(s && c && v);
1004 
1005    assert(layer < VL_COMPOSITOR_MAX_LAYERS);
1006 
1007    s->used_layers |= 1 << layer;
1008 
1009    s->layers[layer].fs = y? c->fs_rgb_yuv.y : c->fs_rgb_yuv.uv;
1010 
1011    vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_BT_709_REV, NULL, false, &csc_matrix);
1012    vl_compositor_set_csc_matrix(s, (const vl_csc_matrix *)&csc_matrix, 1.0f, 0.0f);
1013 
1014    s->layers[layer].samplers[0] = c->sampler_linear;
1015    s->layers[layer].samplers[1] = NULL;
1016    s->layers[layer].samplers[2] = NULL;
1017 
1018    pipe_sampler_view_reference(&s->layers[layer].sampler_views[0], v);
1019    pipe_sampler_view_reference(&s->layers[layer].sampler_views[1], NULL);
1020    pipe_sampler_view_reference(&s->layers[layer].sampler_views[2], NULL);
1021 
1022    calc_src_and_dst(&s->layers[layer], v->texture->width0, v->texture->height0,
1023                     src_rect ? *src_rect : default_rect(&s->layers[layer]),
1024                     dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
1025 }
1026 
1027 void
1028 vl_compositor_reset_dirty_area(struct u_rect *dirty)
1029 {
1030    assert(dirty);
1031 
1032    dirty->x0 = dirty->y0 = MIN_DIRTY;
1033    dirty->x1 = dirty->y1 = MAX_DIRTY;
1034 }
1035 
1036 void
1037 vl_compositor_set_clear_color(struct vl_compositor_state *s, union pipe_color_union *color)
1038 {
1039    assert(s);
1040    assert(color);
1041 
1042    s->clear_color = *color;
1043 }
1044 
1045 void
1046 vl_compositor_get_clear_color(struct vl_compositor_state *s, union pipe_color_union *color)
1047 {
1048    assert(s);
1049    assert(color);
1050 
1051    *color = s->clear_color;
1052 }
1053 
1054 void
1055 vl_compositor_clear_layers(struct vl_compositor_state *s)
1056 {
1057    unsigned i, j;
1058 
1059    assert(s);
1060 
1061    s->used_layers = 0;
1062    for ( i = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
1063       struct vertex4f v_one = { 1.0f, 1.0f, 1.0f, 1.0f };
1064       s->layers[i].clearing = i ? false : true;
1065       s->layers[i].blend = NULL;
1066       s->layers[i].fs = NULL;
1067       s->layers[i].viewport.scale[2] = 1;
1068       s->layers[i].viewport.translate[2] = 0;
1069       s->layers[i].rotate = VL_COMPOSITOR_ROTATE_0;
1070 
1071       for ( j = 0; j < 3; j++)
1072          pipe_sampler_view_reference(&s->layers[i].sampler_views[j], NULL);
1073       for ( j = 0; j < 4; ++j)
1074          s->layers[i].colors[j] = v_one;
1075    }
1076 }
1077 
1078 void
1079 vl_compositor_cleanup(struct vl_compositor *c)
1080 {
1081    assert(c);
1082 
1083    cleanup_buffers(c);
1084    cleanup_shaders(c);
1085    cleanup_pipe_state(c);
1086 }
1087 
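/* Upload the color conversion matrix plus the luma-key limits into the
 * constant buffer read by create_frag_shader_csc() (constants 0..3).
 */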
1088 bool
1089 vl_compositor_set_csc_matrix(struct vl_compositor_state *s,
1090                              vl_csc_matrix const *matrix,
1091                              float luma_min, float luma_max)
1092 {
1093    struct pipe_transfer *buf_transfer;
1094 
1095    assert(s);
1096 
1097    float *ptr = pipe_buffer_map(s->pipe, s->csc_matrix,
1098                                PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD_RANGE,
1099                                &buf_transfer);
1100 
1101    if (!ptr)
1102       return false;
1103 
1104    memcpy(ptr, matrix, sizeof(vl_csc_matrix));
1105 
1106    ptr += sizeof(vl_csc_matrix)/sizeof(float);
1107    ptr[0] = luma_min;
1108    ptr[1] = luma_max;
1109 
1110    pipe_buffer_unmap(s->pipe, buf_transfer);
1111 
1112    return true;
1113 }
1114 
1115 void
1116 vl_compositor_set_dst_clip(struct vl_compositor_state *s, struct u_rect *dst_clip)
1117 {
1118    assert(s);
1119 
1120    s->scissor_valid = dst_clip != NULL;
1121    if (dst_clip) {
1122       s->scissor.minx = dst_clip->x0;
1123       s->scissor.miny = dst_clip->y0;
1124       s->scissor.maxx = dst_clip->x1;
1125       s->scissor.maxy = dst_clip->y1;
1126    }
1127 }
1128 
1129 void
1130 vl_compositor_set_layer_blend(struct vl_compositor_state *s,
1131                               unsigned layer, void *blend,
1132                               bool is_clearing)
1133 {
1134    assert(s && blend);
1135 
1136    assert(layer < VL_COMPOSITOR_MAX_LAYERS);
1137 
1138    s->layers[layer].clearing = is_clearing;
1139    s->layers[layer].blend = blend;
1140 }
1141 
1142 void
1143 vl_compositor_set_layer_dst_area(struct vl_compositor_state *s,
1144                                  unsigned layer, struct u_rect *dst_area)
1145 {
1146    assert(s);
1147 
1148    assert(layer < VL_COMPOSITOR_MAX_LAYERS);
1149 
1150    s->layers[layer].viewport_valid = dst_area != NULL;
1151    if (dst_area) {
1152       s->layers[layer].viewport.scale[0] = dst_area->x1 - dst_area->x0;
1153       s->layers[layer].viewport.scale[1] = dst_area->y1 - dst_area->y0;
1154       s->layers[layer].viewport.translate[0] = dst_area->x0;
1155       s->layers[layer].viewport.translate[1] = dst_area->y0;
1156    }
1157 }
1158 
1159 void
1160 vl_compositor_set_buffer_layer(struct vl_compositor_state *s,
1161                                struct vl_compositor *c,
1162                                unsigned layer,
1163                                struct pipe_video_buffer *buffer,
1164                                struct u_rect *src_rect,
1165                                struct u_rect *dst_rect,
1166                                enum vl_compositor_deinterlace deinterlace)
1167 {
1168    struct pipe_sampler_view **sampler_views;
1169    unsigned i;
1170 
1171    assert(s && c && buffer);
1172 
1173    assert(layer < VL_COMPOSITOR_MAX_LAYERS);
1174 
1175    s->used_layers |= 1 << layer;
1176    sampler_views = buffer->get_sampler_view_components(buffer);
1177    for (i = 0; i < 3; ++i) {
1178       s->layers[layer].samplers[i] = c->sampler_linear;
1179       pipe_sampler_view_reference(&s->layers[layer].sampler_views[i], sampler_views[i]);
1180    }
1181 
1182    calc_src_and_dst(&s->layers[layer], buffer->width, buffer->height,
1183                     src_rect ? *src_rect : default_rect(&s->layers[layer]),
1184                     dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
1185 
1186    if (buffer->interlaced) {
1187       float half_a_line = 0.5f / s->layers[layer].zw.y;
1188       switch(deinterlace) {
1189       case VL_COMPOSITOR_WEAVE:
1190          s->layers[layer].fs = c->fs_weave_rgb;
1191          break;
1192 
1193       case VL_COMPOSITOR_BOB_TOP:
1194          s->layers[layer].zw.x = 0.0f;
1195          s->layers[layer].src.tl.y += half_a_line;
1196          s->layers[layer].src.br.y += half_a_line;
1197          s->layers[layer].fs = c->fs_video_buffer;
1198          break;
1199 
1200       case VL_COMPOSITOR_BOB_BOTTOM:
1201          s->layers[layer].zw.x = 1.0f;
1202          s->layers[layer].src.tl.y -= half_a_line;
1203          s->layers[layer].src.br.y -= half_a_line;
1204          s->layers[layer].fs = c->fs_video_buffer;
1205          break;
1206       }
1207 
1208    } else
1209       s->layers[layer].fs = c->fs_video_buffer;
1210 }
1211 
1212 void
1213 vl_compositor_set_palette_layer(struct vl_compositor_state *s,
1214                                 struct vl_compositor *c,
1215                                 unsigned layer,
1216                                 struct pipe_sampler_view *indexes,
1217                                 struct pipe_sampler_view *palette,
1218                                 struct u_rect *src_rect,
1219                                 struct u_rect *dst_rect,
1220                                 bool include_color_conversion)
1221 {
1222    assert(s && c && indexes && palette);
1223 
1224    assert(layer < VL_COMPOSITOR_MAX_LAYERS);
1225 
1226    s->used_layers |= 1 << layer;
1227 
1228    s->layers[layer].fs = include_color_conversion ?
1229       c->fs_palette.yuv : c->fs_palette.rgb;
1230 
1231    s->layers[layer].samplers[0] = c->sampler_linear;
1232    s->layers[layer].samplers[1] = c->sampler_nearest;
1233    s->layers[layer].samplers[2] = NULL;
1234    pipe_sampler_view_reference(&s->layers[layer].sampler_views[0], indexes);
1235    pipe_sampler_view_reference(&s->layers[layer].sampler_views[1], palette);
1236    pipe_sampler_view_reference(&s->layers[layer].sampler_views[2], NULL);
1237    calc_src_and_dst(&s->layers[layer], indexes->texture->width0, indexes->texture->height0,
1238                     src_rect ? *src_rect : default_rect(&s->layers[layer]),
1239                     dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
1240 }
1241 
1242 void
1243 vl_compositor_set_rgba_layer(struct vl_compositor_state *s,
1244                              struct vl_compositor *c,
1245                              unsigned layer,
1246                              struct pipe_sampler_view *rgba,
1247                              struct u_rect *src_rect,
1248                              struct u_rect *dst_rect,
1249                              struct vertex4f *colors)
1250 {
1251    unsigned i;
1252 
1253    assert(s && c && rgba);
1254 
1255    assert(layer < VL_COMPOSITOR_MAX_LAYERS);
1256 
1257    s->used_layers |= 1 << layer;
1258    s->layers[layer].fs = c->fs_rgba;
1259    s->layers[layer].samplers[0] = c->sampler_linear;
1260    s->layers[layer].samplers[1] = NULL;
1261    s->layers[layer].samplers[2] = NULL;
1262    pipe_sampler_view_reference(&s->layers[layer].sampler_views[0], rgba);
1263    pipe_sampler_view_reference(&s->layers[layer].sampler_views[1], NULL);
1264    pipe_sampler_view_reference(&s->layers[layer].sampler_views[2], NULL);
1265    calc_src_and_dst(&s->layers[layer], rgba->texture->width0, rgba->texture->height0,
1266                     src_rect ? *src_rect : default_rect(&s->layers[layer]),
1267                     dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
1268 
1269    if (colors)
1270       for (i = 0; i < 4; ++i)
1271          s->layers[layer].colors[i] = colors[i];
1272 }
1273 
1274 void
1275 vl_compositor_set_layer_rotation(struct vl_compositor_state *s,
1276                                  unsigned layer,
1277                                  enum vl_compositor_rotation rotate)
1278 {
1279    assert(s);
1280    assert(layer < VL_COMPOSITOR_MAX_LAYERS);
1281    s->layers[layer].rotate = rotate;
1282 }
1283 
1284 void
1285 vl_compositor_yuv_deint_full(struct vl_compositor_state *s,
1286                              struct vl_compositor *c,
1287                              struct pipe_video_buffer *src,
1288                              struct pipe_video_buffer *dst,
1289                              struct u_rect *src_rect,
1290                              struct u_rect *dst_rect,
1291                              enum vl_compositor_deinterlace deinterlace)
1292 {
1293    struct pipe_surface **dst_surfaces;
1294 
1295    dst_surfaces = dst->get_surfaces(dst);
1296    vl_compositor_clear_layers(s);
1297 
1298    set_yuv_layer(s, c, 0, src, src_rect, NULL, true, deinterlace);
1299    vl_compositor_set_layer_dst_area(s, 0, dst_rect);
1300    vl_compositor_render(s, c, dst_surfaces[0], NULL, false);
1301 
1302    if (dst_rect) {
1303       dst_rect->x1 /= 2;
1304       dst_rect->y1 /= 2;
1305    }
1306 
1307    set_yuv_layer(s, c, 0, src, src_rect, NULL, false, deinterlace);
1308    vl_compositor_set_layer_dst_area(s, 0, dst_rect);
1309    vl_compositor_render(s, c, dst_surfaces[1], NULL, false);
1310 
1311    s->pipe->flush(s->pipe, NULL, 0);
1312 }
1313 
1314 void
1315 vl_compositor_convert_rgb_to_yuv(struct vl_compositor_state *s,
1316                                  struct vl_compositor *c,
1317                                  unsigned layer,
1318                                  struct pipe_resource *src_res,
1319                                  struct pipe_video_buffer *dst,
1320                                  struct u_rect *src_rect,
1321                                  struct u_rect *dst_rect)
1322 {
1323    struct pipe_sampler_view *sv, sv_templ;
1324    struct pipe_surface **dst_surfaces;
1325 
1326    dst_surfaces = dst->get_surfaces(dst);
1327 
1328    memset(&sv_templ, 0, sizeof(sv_templ));
1329    u_sampler_view_default_template(&sv_templ, src_res, src_res->format);
1330    sv = s->pipe->create_sampler_view(s->pipe, src_res, &sv_templ);
1331 
1332    vl_compositor_clear_layers(s);
1333 
1334    set_rgb_to_yuv_layer(s, c, 0, sv, src_rect, NULL, true);
1335    vl_compositor_set_layer_dst_area(s, 0, dst_rect);
1336    vl_compositor_render(s, c, dst_surfaces[0], NULL, false);
1337 
1338    if (dst_rect) {
1339       dst_rect->x1 /= 2;
1340       dst_rect->y1 /= 2;
1341    }
1342 
1343    set_rgb_to_yuv_layer(s, c, 0, sv, src_rect, NULL, false);
1344    vl_compositor_set_layer_dst_area(s, 0, dst_rect);
1345    vl_compositor_render(s, c, dst_surfaces[1], NULL, false);
1346    pipe_sampler_view_reference(&sv, NULL);
1347 
1348    s->pipe->flush(s->pipe, NULL, 0);
1349 }
1350 
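/* Main entry point: bind the destination surface and scissor, generate the
 * vertex data, clear the render target if requested and the dirty area is
 * non-empty, then draw every used layer while extending dirty_area with the
 * regions actually drawn.
 */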
1351 void
1352 vl_compositor_render(struct vl_compositor_state *s,
1353                      struct vl_compositor       *c,
1354                      struct pipe_surface        *dst_surface,
1355                      struct u_rect              *dirty_area,
1356                      bool                        clear_dirty)
1357 {
1358    assert(c);
1359    assert(dst_surface);
1360 
1361    c->fb_state.width = dst_surface->width;
1362    c->fb_state.height = dst_surface->height;
1363    c->fb_state.cbufs[0] = dst_surface;
1364 
1365    if (!s->scissor_valid) {
1366       s->scissor.minx = 0;
1367       s->scissor.miny = 0;
1368       s->scissor.maxx = dst_surface->width;
1369       s->scissor.maxy = dst_surface->height;
1370    }
1371    c->pipe->set_scissor_states(c->pipe, 0, 1, &s->scissor);
1372 
1373    gen_vertex_data(c, s, dirty_area);
1374 
1375    if (clear_dirty && dirty_area &&
1376        (dirty_area->x0 < dirty_area->x1 || dirty_area->y0 < dirty_area->y1)) {
1377 
1378       c->pipe->clear_render_target(c->pipe, dst_surface, &s->clear_color,
1379                                    0, 0, dst_surface->width, dst_surface->height, false);
1380       dirty_area->x0 = dirty_area->y0 = MAX_DIRTY;
1381       dirty_area->x1 = dirty_area->y1 = MIN_DIRTY;
1382    }
1383 
1384    c->pipe->set_framebuffer_state(c->pipe, &c->fb_state);
1385    c->pipe->bind_vs_state(c->pipe, c->vs);
1386    c->pipe->set_vertex_buffers(c->pipe, 0, 1, &c->vertex_buf);
1387    c->pipe->bind_vertex_elements_state(c->pipe, c->vertex_elems_state);
1388    pipe_set_constant_buffer(c->pipe, PIPE_SHADER_FRAGMENT, 0, s->csc_matrix);
1389    c->pipe->bind_rasterizer_state(c->pipe, c->rast);
1390 
1391    draw_layers(c, s, dirty_area);
1392 }
1393 
1394 bool
1395 vl_compositor_init(struct vl_compositor *c, struct pipe_context *pipe)
1396 {
1397    assert(c);
1398 
1399    memset(c, 0, sizeof(*c));
1400 
1401    c->pipe = pipe;
1402 
1403    if (!init_pipe_state(c)) {
1404       return false;
1405    }
1406 
1407    if (!init_shaders(c)) {
1408       cleanup_pipe_state(c);
1409       return false;
1410    }
1411 
1412    if (!init_buffers(c)) {
1413       cleanup_shaders(c);
1414       cleanup_pipe_state(c);
1415       return false;
1416    }
1417 
1418    return true;
1419 }
1420 
1421 bool
1422 vl_compositor_init_state(struct vl_compositor_state *s, struct pipe_context *pipe)
1423 {
1424    vl_csc_matrix csc_matrix;
1425 
1426    assert(s);
1427 
1428    memset(s, 0, sizeof(*s));
1429 
1430    s->pipe = pipe;
1431 
1432    s->clear_color.f[0] = s->clear_color.f[1] = 0.0f;
1433    s->clear_color.f[2] = s->clear_color.f[3] = 0.0f;
1434 
1435    /*
1436     * Create our fragment shader's constant buffer
1437     * Const buffer contains the color conversion matrix and bias vectors
1438     */
1439    /* XXX: Create with IMMUTABLE/STATIC... although it does change every once in a long while... */
1440    s->csc_matrix = pipe_buffer_create
1441    (
1442       pipe->screen,
1443       PIPE_BIND_CONSTANT_BUFFER,
1444       PIPE_USAGE_DEFAULT,
1445       sizeof(csc_matrix) + 2*sizeof(float)
1446    );
1447 
1448    if (!s->csc_matrix)
1449       return false;
1450 
1451    vl_compositor_clear_layers(s);
1452 
1453    vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_IDENTITY, NULL, true, &csc_matrix);
1454    if (!vl_compositor_set_csc_matrix(s, (const vl_csc_matrix *)&csc_matrix, 1.0f, 0.0f))
1455       return false;
1456 
1457    return true;
1458 }
1459 
1460 void
1461 vl_compositor_cleanup_state(struct vl_compositor_state *s)
1462 {
1463    assert(s);
1464 
1465    vl_compositor_clear_layers(s);
1466    pipe_resource_reference(&s->csc_matrix, NULL);
1467 }
1468
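
/* A minimal usage sketch of the API above, assuming a valid pipe_context, a
 * decoded pipe_video_buffer and a pipe_surface render target; 'pipe',
 * 'video_buffer' and 'dst_surface' are placeholder names and error handling
 * is omitted:
 *
 *    struct vl_compositor c;
 *    struct vl_compositor_state state;
 *    struct u_rect dirty;
 *
 *    vl_compositor_init(&c, pipe);
 *    vl_compositor_init_state(&state, pipe);
 *    vl_compositor_reset_dirty_area(&dirty);
 *
 *    vl_compositor_clear_layers(&state);
 *    vl_compositor_set_buffer_layer(&state, &c, 0, video_buffer,
 *                                   NULL, NULL, VL_COMPOSITOR_WEAVE);
 *    vl_compositor_render(&state, &c, dst_surface, &dirty, true);
 *
 *    vl_compositor_cleanup_state(&state);
 *    vl_compositor_cleanup(&c);
 */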