/**************************************************************************
 *
 * Copyright 2009 Younes Manton.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include "util/u_sampler.h"

#include "vl_compositor_gfx.h"
#include "vl_compositor_cs.h"

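/* Lazily create the compositing shaders on first use: the compute path is
 * used when supported, otherwise the graphics (vertex/fragment) path. */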
static bool
init_shaders(struct vl_compositor *c)
{
   assert(c);

   if (c->shaders_initialized)
      return true;

   if (c->pipe_cs_composit_supported) {
      if (!vl_compositor_cs_init_shaders(c))
         return false;

   } else if (c->pipe_gfx_supported) {
      c->fs_video_buffer = create_frag_shader_video_buffer(c);
      if (!c->fs_video_buffer) {
         debug_printf("Unable to create YCbCr-to-RGB fragment shader.\n");
         return false;
      }

      c->fs_weave_rgb = create_frag_shader_weave_rgb(c);
      if (!c->fs_weave_rgb) {
         debug_printf("Unable to create YCbCr-to-RGB weave fragment shader.\n");
         return false;
      }

      c->fs_yuv.weave.y = create_frag_shader_deint_yuv(c, true, true);
      c->fs_yuv.weave.uv = create_frag_shader_deint_yuv(c, false, true);
      c->fs_yuv.bob.y = create_frag_shader_deint_yuv(c, true, false);
      c->fs_yuv.bob.uv = create_frag_shader_deint_yuv(c, false, false);
      if (!c->fs_yuv.weave.y || !c->fs_yuv.weave.uv ||
          !c->fs_yuv.bob.y || !c->fs_yuv.bob.uv) {
         debug_printf("Unable to create YCbCr i-to-YCbCr p deint fragment shader.\n");
         return false;
      }

      c->fs_rgb_yuv.y = create_frag_shader_rgb_yuv(c, true);
      c->fs_rgb_yuv.uv = create_frag_shader_rgb_yuv(c, false);
      if (!c->fs_rgb_yuv.y || !c->fs_rgb_yuv.uv) {
         debug_printf("Unable to create RGB-to-YUV fragment shader.\n");
         return false;
      }
   }

   if (c->pipe_gfx_supported) {
      c->vs = create_vert_shader(c);
      if (!c->vs) {
         debug_printf("Unable to create vertex shader.\n");
         return false;
      }

      c->fs_palette.yuv = create_frag_shader_palette(c, true);
      if (!c->fs_palette.yuv) {
         debug_printf("Unable to create YUV-Palette-to-RGB fragment shader.\n");
         return false;
      }

      c->fs_palette.rgb = create_frag_shader_palette(c, false);
      if (!c->fs_palette.rgb) {
         debug_printf("Unable to create RGB-Palette-to-RGB fragment shader.\n");
         return false;
      }

      c->fs_rgba = create_frag_shader_rgba(c);
      if (!c->fs_rgba) {
         debug_printf("Unable to create RGB-to-RGB fragment shader.\n");
         return false;
      }
   }

   c->shaders_initialized = true;

   return true;
}

static void cleanup_shaders(struct vl_compositor *c)
{
   assert(c);

   if (!c->shaders_initialized)
      return;

   if (c->pipe_cs_composit_supported) {
      vl_compositor_cs_cleanup_shaders(c);
   } else if (c->pipe_gfx_supported) {
      c->pipe->delete_fs_state(c->pipe, c->fs_video_buffer);
      c->pipe->delete_fs_state(c->pipe, c->fs_weave_rgb);
      c->pipe->delete_fs_state(c->pipe, c->fs_yuv.weave.y);
      c->pipe->delete_fs_state(c->pipe, c->fs_yuv.weave.uv);
      c->pipe->delete_fs_state(c->pipe, c->fs_yuv.bob.y);
      c->pipe->delete_fs_state(c->pipe, c->fs_yuv.bob.uv);
      c->pipe->delete_fs_state(c->pipe, c->fs_rgb_yuv.y);
      c->pipe->delete_fs_state(c->pipe, c->fs_rgb_yuv.uv);
   }

   if (c->pipe_gfx_supported) {
      c->pipe->delete_vs_state(c->pipe, c->vs);
      c->pipe->delete_fs_state(c->pipe, c->fs_palette.yuv);
      c->pipe->delete_fs_state(c->pipe, c->fs_palette.rgb);
      c->pipe->delete_fs_state(c->pipe, c->fs_rgba);
   }
}

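/* Create the fixed pipeline state objects shared by all layers: linear and
 * nearest samplers and, on the graphics path, blend, rasterizer and
 * depth/stencil/alpha state. */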
static bool
init_pipe_state(struct vl_compositor *c)
{
   struct pipe_rasterizer_state rast;
   struct pipe_sampler_state sampler;
   struct pipe_blend_state blend;
   struct pipe_depth_stencil_alpha_state dsa;
   unsigned i;

   assert(c);

   c->fb_state.nr_cbufs = 1;
   c->fb_state.zsbuf = NULL;

   memset(&sampler, 0, sizeof(sampler));
   sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_r = PIPE_TEX_WRAP_REPEAT;
   sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.compare_mode = PIPE_TEX_COMPARE_NONE;
   sampler.compare_func = PIPE_FUNC_ALWAYS;
   c->sampler_linear = c->pipe->create_sampler_state(c->pipe, &sampler);

   sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
   sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
   c->sampler_nearest = c->pipe->create_sampler_state(c->pipe, &sampler);

   if (c->pipe_gfx_supported) {
      memset(&blend, 0, sizeof blend);
      blend.independent_blend_enable = 0;
      blend.rt[0].blend_enable = 0;
      blend.logicop_enable = 0;
      blend.logicop_func = PIPE_LOGICOP_CLEAR;
      blend.rt[0].colormask = PIPE_MASK_RGBA;
      blend.dither = 0;
      c->blend_clear = c->pipe->create_blend_state(c->pipe, &blend);

      blend.rt[0].blend_enable = 1;
      blend.rt[0].rgb_func = PIPE_BLEND_ADD;
      blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
      blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
      blend.rt[0].alpha_func = PIPE_BLEND_ADD;
      blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
      blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
      c->blend_add = c->pipe->create_blend_state(c->pipe, &blend);

      memset(&rast, 0, sizeof rast);
      rast.flatshade = 0;
      rast.front_ccw = 1;
      rast.cull_face = PIPE_FACE_NONE;
      rast.fill_back = PIPE_POLYGON_MODE_FILL;
      rast.fill_front = PIPE_POLYGON_MODE_FILL;
      rast.scissor = 1;
      rast.line_width = 1;
      rast.point_size_per_vertex = 1;
      rast.offset_units = 1;
      rast.offset_scale = 1;
      rast.half_pixel_center = 1;
      rast.bottom_edge_rule = 1;
      rast.depth_clip_near = 1;
      rast.depth_clip_far = 1;

      c->rast = c->pipe->create_rasterizer_state(c->pipe, &rast);

      memset(&dsa, 0, sizeof dsa);
      dsa.depth_enabled = 0;
      dsa.depth_writemask = 0;
      dsa.depth_func = PIPE_FUNC_ALWAYS;
      for (i = 0; i < 2; ++i) {
         dsa.stencil[i].enabled = 0;
         dsa.stencil[i].func = PIPE_FUNC_ALWAYS;
         dsa.stencil[i].fail_op = PIPE_STENCIL_OP_KEEP;
         dsa.stencil[i].zpass_op = PIPE_STENCIL_OP_KEEP;
         dsa.stencil[i].zfail_op = PIPE_STENCIL_OP_KEEP;
         dsa.stencil[i].valuemask = 0;
         dsa.stencil[i].writemask = 0;
      }
      dsa.alpha_enabled = 0;
      dsa.alpha_func = PIPE_FUNC_ALWAYS;
      dsa.alpha_ref_value = 0;
      c->dsa = c->pipe->create_depth_stencil_alpha_state(c->pipe, &dsa);
      c->pipe->bind_depth_stencil_alpha_state(c->pipe, c->dsa);
   }

   return true;
}

static void cleanup_pipe_state(struct vl_compositor *c)
{
   assert(c);

   if (c->pipe_gfx_supported) {
      /* Asserted in softpipe_delete_fs_state() for some reason */
      c->pipe->bind_vs_state(c->pipe, NULL);
      c->pipe->bind_fs_state(c->pipe, NULL);

      c->pipe->delete_depth_stencil_alpha_state(c->pipe, c->dsa);
      c->pipe->delete_blend_state(c->pipe, c->blend_clear);
      c->pipe->delete_blend_state(c->pipe, c->blend_add);
      c->pipe->delete_rasterizer_state(c->pipe, c->rast);
   }
   if (c->sampler_linear)
      c->pipe->delete_sampler_state(c->pipe, c->sampler_linear);
   if (c->sampler_nearest)
      c->pipe->delete_sampler_state(c->pipe, c->sampler_nearest);
}

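/* Set up the vertex buffer binding and, on the graphics path, the vertex
 * element layout: a 2D position followed by two 4-component attributes,
 * interleaved with stride VL_COMPOSITOR_VB_STRIDE. */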
static bool
init_buffers(struct vl_compositor *c)
{
   struct pipe_vertex_element vertex_elems[3];
   memset(vertex_elems, 0, sizeof(vertex_elems));

   assert(c);

   /*
    * Create our vertex buffer and vertex buffer elements
    */
   c->vertex_buf.buffer_offset = 0;
   c->vertex_buf.buffer.resource = NULL;
   c->vertex_buf.is_user_buffer = false;

   if (c->pipe_gfx_supported) {
      vertex_elems[0].src_offset = 0;
      vertex_elems[0].src_stride = VL_COMPOSITOR_VB_STRIDE;
      vertex_elems[0].instance_divisor = 0;
      vertex_elems[0].vertex_buffer_index = 0;
      vertex_elems[0].src_format = PIPE_FORMAT_R32G32_FLOAT;
      vertex_elems[1].src_offset = sizeof(struct vertex2f);
      vertex_elems[1].src_stride = VL_COMPOSITOR_VB_STRIDE;
      vertex_elems[1].instance_divisor = 0;
      vertex_elems[1].vertex_buffer_index = 0;
      vertex_elems[1].src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
      vertex_elems[2].src_offset = sizeof(struct vertex2f) + sizeof(struct vertex4f);
      vertex_elems[2].src_stride = VL_COMPOSITOR_VB_STRIDE;
      vertex_elems[2].instance_divisor = 0;
      vertex_elems[2].vertex_buffer_index = 0;
      vertex_elems[2].src_format = PIPE_FORMAT_R32G32B32A32_FLOAT;
      c->vertex_elems_state = c->pipe->create_vertex_elements_state(c->pipe, 3, vertex_elems);
   }

   return true;
}

static void
cleanup_buffers(struct vl_compositor *c)
{
   assert(c);

   if (c->pipe_gfx_supported) {
      c->pipe->delete_vertex_elements_state(c->pipe, c->vertex_elems_state);
   }
   pipe_resource_reference(&c->vertex_buf.buffer.resource, NULL);
}

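/* Default to the full extent of the layer's first sampler view, covering
 * all array layers of the texture. */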
static inline struct u_rect
default_rect(struct vl_compositor_layer *layer)
{
   struct pipe_resource *res = layer->sampler_views[0]->texture;
   struct u_rect rect = { 0, res->width0, 0, res->height0 * res->array_size };
   return rect;
}

static inline struct vertex2f
calc_topleft(struct vertex2f size, struct u_rect rect)
{
   struct vertex2f res = { rect.x0 / size.x, rect.y0 / size.y };
   return res;
}

static inline struct vertex2f
calc_bottomright(struct vertex2f size, struct u_rect rect)
{
   struct vertex2f res = { rect.x1 / size.x, rect.y1 / size.y };
   return res;
}

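/* Normalize the source and destination rectangles against the given size and
 * store the height in zw.y, which the bob deinterlacing code later uses to
 * compute half-line offsets. */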
static inline void
calc_src_and_dst(struct vl_compositor_layer *layer, unsigned width, unsigned height,
                 struct u_rect src, struct u_rect dst)
{
   struct vertex2f size = { width, height };

   layer->src.tl = calc_topleft(size, src);
   layer->src.br = calc_bottomright(size, src);
   layer->dst.tl = calc_topleft(size, dst);
   layer->dst.br = calc_bottomright(size, dst);
   layer->zw.x = 0.0f;
   layer->zw.y = size.y;
}

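/* Bind one plane of a video buffer as a layer and select the compute or
 * fragment shader matching the requested plane and deinterlacing mode. */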
static void
set_yuv_layer(struct vl_compositor_state *s, struct vl_compositor *c,
              unsigned layer, struct pipe_video_buffer *buffer,
              struct u_rect *src_rect, struct u_rect *dst_rect,
              enum vl_compositor_plane plane,
              enum vl_compositor_deinterlace deinterlace)
{
   struct pipe_sampler_view **sampler_views;
   float half_a_line;
   unsigned i;
   bool y = plane == VL_COMPOSITOR_PLANE_Y;

   assert(s && c && buffer);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   if (!init_shaders(c))
      return;

   s->used_layers |= 1 << layer;
   sampler_views = buffer->get_sampler_view_components(buffer);
   for (i = 0; i < 3; ++i) {
      s->layers[layer].samplers[i] = c->sampler_linear;
      pipe_sampler_view_reference(&s->layers[layer].sampler_views[i], sampler_views[i]);
   }

   calc_src_and_dst(&s->layers[layer], buffer->width, buffer->height,
                    src_rect ? *src_rect : default_rect(&s->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&s->layers[layer]));

   half_a_line = 0.5f / s->layers[layer].zw.y;

   switch(deinterlace) {
   case VL_COMPOSITOR_BOB_TOP:
      s->layers[layer].zw.x = 0.0f;
      s->layers[layer].src.tl.y += half_a_line;
      s->layers[layer].src.br.y += half_a_line;
      if (c->pipe_gfx_supported)
         s->layers[layer].fs = (y) ? c->fs_yuv.bob.y : c->fs_yuv.bob.uv;
      if (c->pipe_cs_composit_supported)
         s->layers[layer].cs = (y) ? c->cs_yuv.progressive.y : c->cs_yuv.progressive.uv;
      break;

   case VL_COMPOSITOR_BOB_BOTTOM:
      s->layers[layer].zw.x = 1.0f;
      s->layers[layer].src.tl.y -= half_a_line;
      s->layers[layer].src.br.y -= half_a_line;
      if (c->pipe_gfx_supported)
         s->layers[layer].fs = (y) ? c->fs_yuv.bob.y : c->fs_yuv.bob.uv;
      if (c->pipe_cs_composit_supported)
         s->layers[layer].cs = (y) ? c->cs_yuv.progressive.y : c->cs_yuv.progressive.uv;
      break;

   case VL_COMPOSITOR_NONE:
      if (c->pipe_cs_composit_supported) {
         if (plane == VL_COMPOSITOR_PLANE_Y)
            s->layers[layer].cs = c->cs_yuv.progressive.y;
         else if (plane == VL_COMPOSITOR_PLANE_U)
            s->layers[layer].cs = c->cs_yuv.progressive.u;
         else if (plane == VL_COMPOSITOR_PLANE_V)
            s->layers[layer].cs = c->cs_yuv.progressive.v;
         else if (plane == VL_COMPOSITOR_PLANE_UV)
            s->layers[layer].cs = c->cs_yuv.progressive.uv;
         break;
      }
      FALLTHROUGH;

   default:
      if (c->pipe_gfx_supported)
         s->layers[layer].fs = (y) ? c->fs_yuv.weave.y : c->fs_yuv.weave.uv;
      if (c->pipe_cs_composit_supported)
         s->layers[layer].cs = (y) ? c->cs_yuv.weave.y : c->cs_yuv.weave.uv;
      break;
   }
}

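/* Bind an RGB sampler view as a layer that writes a single YUV plane,
 * selecting the matching RGB-to-YUV compute or fragment shader. */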
static void
set_rgb_to_yuv_layer(struct vl_compositor_state *s, struct vl_compositor *c,
                     unsigned layer, struct pipe_sampler_view *v,
                     struct u_rect *src_rect, struct u_rect *dst_rect,
                     enum vl_compositor_plane plane)
{
   bool y = plane == VL_COMPOSITOR_PLANE_Y;

   assert(s && c && v);
   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   if (!init_shaders(c))
      return;

   s->used_layers |= 1 << layer;

   if (c->pipe_cs_composit_supported) {
      if (plane == VL_COMPOSITOR_PLANE_Y)
         s->layers[layer].cs = c->cs_rgb_yuv.y;
      else if (plane == VL_COMPOSITOR_PLANE_U)
         s->layers[layer].cs = c->cs_rgb_yuv.u;
      else if (plane == VL_COMPOSITOR_PLANE_V)
         s->layers[layer].cs = c->cs_rgb_yuv.v;
      else if (plane == VL_COMPOSITOR_PLANE_UV)
         s->layers[layer].cs = c->cs_rgb_yuv.uv;
   } else if (c->pipe_gfx_supported)
      s->layers[layer].fs = y ? c->fs_rgb_yuv.y : c->fs_rgb_yuv.uv;

   s->layers[layer].samplers[0] = c->sampler_linear;
   s->layers[layer].samplers[1] = NULL;
   s->layers[layer].samplers[2] = NULL;

   pipe_sampler_view_reference(&s->layers[layer].sampler_views[0], v);
   pipe_sampler_view_reference(&s->layers[layer].sampler_views[1], NULL);
   pipe_sampler_view_reference(&s->layers[layer].sampler_views[2], NULL);

   calc_src_and_dst(&s->layers[layer], v->texture->width0, v->texture->height0,
                    src_rect ? *src_rect : default_rect(&s->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
}

void
vl_compositor_reset_dirty_area(struct u_rect *dirty)
{
   assert(dirty);

   dirty->x0 = dirty->y0 = VL_COMPOSITOR_MIN_DIRTY;
   dirty->x1 = dirty->y1 = VL_COMPOSITOR_MAX_DIRTY;
}

void
vl_compositor_set_clear_color(struct vl_compositor_state *s, union pipe_color_union *color)
{
   assert(s);
   assert(color);

   s->clear_color = *color;
}

void
vl_compositor_get_clear_color(struct vl_compositor_state *s, union pipe_color_union *color)
{
   assert(s);
   assert(color);

   *color = s->clear_color;
}

void
vl_compositor_clear_layers(struct vl_compositor_state *s)
{
   unsigned i, j;

   assert(s);
   s->used_layers = 0;
   for ( i = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      struct vertex4f v_one = { 1.0f, 1.0f, 1.0f, 1.0f };
      s->layers[i].clearing = i ? false : true;
      s->layers[i].blend = NULL;
      s->layers[i].fs = NULL;
      s->layers[i].cs = NULL;
      s->layers[i].viewport.scale[2] = 1;
      s->layers[i].viewport.translate[2] = 0;
      s->layers[i].viewport.swizzle_x = PIPE_VIEWPORT_SWIZZLE_POSITIVE_X;
      s->layers[i].viewport.swizzle_y = PIPE_VIEWPORT_SWIZZLE_POSITIVE_Y;
      s->layers[i].viewport.swizzle_z = PIPE_VIEWPORT_SWIZZLE_POSITIVE_Z;
      s->layers[i].viewport.swizzle_w = PIPE_VIEWPORT_SWIZZLE_POSITIVE_W;
      s->layers[i].rotate = VL_COMPOSITOR_ROTATE_0;
      s->layers[i].mirror = VL_COMPOSITOR_MIRROR_NONE;

      for ( j = 0; j < 3; j++)
         pipe_sampler_view_reference(&s->layers[i].sampler_views[j], NULL);
      for ( j = 0; j < 4; ++j)
         s->layers[i].colors[j] = v_one;
   }
}

void
vl_compositor_cleanup(struct vl_compositor *c)
{
   assert(c);

   cleanup_buffers(c);
   cleanup_shaders(c);
   cleanup_pipe_state(c);
}

bool
vl_compositor_set_csc_matrix(struct vl_compositor_state *s,
                             vl_csc_matrix const *matrix,
                             float luma_min, float luma_max)
{
   assert(s);

   memcpy(&s->csc_matrix, matrix, sizeof(vl_csc_matrix));
   s->luma_min = luma_min;
   s->luma_max = luma_max;

   return true;
}

void
vl_compositor_set_dst_clip(struct vl_compositor_state *s, struct u_rect *dst_clip)
{
   assert(s);

   s->scissor_valid = dst_clip != NULL;
   if (dst_clip) {
      s->scissor.minx = dst_clip->x0;
      s->scissor.miny = dst_clip->y0;
      s->scissor.maxx = dst_clip->x1;
      s->scissor.maxy = dst_clip->y1;
   }
}

void
vl_compositor_set_layer_blend(struct vl_compositor_state *s,
                              unsigned layer, void *blend,
                              bool is_clearing)
{
   assert(s && blend);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   s->layers[layer].clearing = is_clearing;
   s->layers[layer].blend = blend;
}

void
vl_compositor_set_layer_dst_area(struct vl_compositor_state *s,
                                 unsigned layer, struct u_rect *dst_area)
{
   assert(s);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   s->layers[layer].viewport_valid = dst_area != NULL;
   if (dst_area) {
      s->layers[layer].viewport.scale[0] = dst_area->x1 - dst_area->x0;
      s->layers[layer].viewport.scale[1] = dst_area->y1 - dst_area->y0;
      s->layers[layer].viewport.translate[0] = dst_area->x0;
      s->layers[layer].viewport.translate[1] = dst_area->y0;
   }
}

void
vl_compositor_set_buffer_layer(struct vl_compositor_state *s,
                               struct vl_compositor *c,
                               unsigned layer,
                               struct pipe_video_buffer *buffer,
                               struct u_rect *src_rect,
                               struct u_rect *dst_rect,
                               enum vl_compositor_deinterlace deinterlace)
{
   struct pipe_sampler_view **sampler_views;
   unsigned i;

   assert(s && c && buffer);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   if (!init_shaders(c))
      return;

   s->used_layers |= 1 << layer;
   sampler_views = buffer->get_sampler_view_components(buffer);
   for (i = 0; i < 3; ++i) {
      s->layers[layer].samplers[i] = c->sampler_linear;
      pipe_sampler_view_reference(&s->layers[layer].sampler_views[i], sampler_views[i]);
   }

   calc_src_and_dst(&s->layers[layer], buffer->width, buffer->height,
                    src_rect ? *src_rect : default_rect(&s->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&s->layers[layer]));

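   /* Weave shaders combine both fields of an interlaced buffer, while the
    * bob modes pick one field via zw.x and shift the source rect by half a
    * line. Progressive content uses the plain video buffer shader. */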
   if (buffer->interlaced) {
      float half_a_line = 0.5f / s->layers[layer].zw.y;
      switch(deinterlace) {
      case VL_COMPOSITOR_NONE:
      case VL_COMPOSITOR_MOTION_ADAPTIVE:
      case VL_COMPOSITOR_WEAVE:
         if (c->pipe_cs_composit_supported)
            s->layers[layer].cs = c->cs_weave_rgb;
         else if (c->pipe_gfx_supported)
            s->layers[layer].fs = c->fs_weave_rgb;
         break;

      case VL_COMPOSITOR_BOB_TOP:
         s->layers[layer].zw.x = 0.0f;
         s->layers[layer].src.tl.y += half_a_line;
         s->layers[layer].src.br.y += half_a_line;
         if (c->pipe_cs_composit_supported)
            s->layers[layer].cs = c->cs_video_buffer;
         else if (c->pipe_gfx_supported)
            s->layers[layer].fs = c->fs_video_buffer;
         break;

      case VL_COMPOSITOR_BOB_BOTTOM:
         s->layers[layer].zw.x = 1.0f;
         s->layers[layer].src.tl.y -= half_a_line;
         s->layers[layer].src.br.y -= half_a_line;
         if (c->pipe_cs_composit_supported)
            s->layers[layer].cs = c->cs_video_buffer;
         else if (c->pipe_gfx_supported)
            s->layers[layer].fs = c->fs_video_buffer;
         break;
      }

   } else {
      if (c->pipe_cs_composit_supported)
         s->layers[layer].cs = c->cs_video_buffer;
      else if (c->pipe_gfx_supported)
         s->layers[layer].fs = c->fs_video_buffer;
   }
}

void
vl_compositor_set_palette_layer(struct vl_compositor_state *s,
                                struct vl_compositor *c,
                                unsigned layer,
                                struct pipe_sampler_view *indexes,
                                struct pipe_sampler_view *palette,
                                struct u_rect *src_rect,
                                struct u_rect *dst_rect,
                                bool include_color_conversion)
{
   assert(s && c && indexes && palette);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   if (!init_shaders(c))
      return;

   s->used_layers |= 1 << layer;

   s->layers[layer].fs = include_color_conversion ?
      c->fs_palette.yuv : c->fs_palette.rgb;

   s->layers[layer].samplers[0] = c->sampler_linear;
   s->layers[layer].samplers[1] = c->sampler_nearest;
   s->layers[layer].samplers[2] = NULL;
   pipe_sampler_view_reference(&s->layers[layer].sampler_views[0], indexes);
   pipe_sampler_view_reference(&s->layers[layer].sampler_views[1], palette);
   pipe_sampler_view_reference(&s->layers[layer].sampler_views[2], NULL);
   calc_src_and_dst(&s->layers[layer], indexes->texture->width0, indexes->texture->height0,
                    src_rect ? *src_rect : default_rect(&s->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&s->layers[layer]));
}

void
vl_compositor_set_rgba_layer(struct vl_compositor_state *s,
                             struct vl_compositor *c,
                             unsigned layer,
                             struct pipe_sampler_view *rgba,
                             struct u_rect *src_rect,
                             struct u_rect *dst_rect,
                             struct vertex4f *colors)
{
   unsigned i;

   assert(s && c && rgba);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   if (!init_shaders(c))
      return;

   s->used_layers |= 1 << layer;
   if (c->fs_rgba)
      s->layers[layer].fs = c->fs_rgba;
   else if (c->cs_rgba)
      s->layers[layer].cs = c->cs_rgba;
   s->layers[layer].samplers[0] = c->sampler_linear;
   s->layers[layer].samplers[1] = NULL;
   s->layers[layer].samplers[2] = NULL;
   pipe_sampler_view_reference(&s->layers[layer].sampler_views[0], rgba);
   pipe_sampler_view_reference(&s->layers[layer].sampler_views[1], NULL);
   pipe_sampler_view_reference(&s->layers[layer].sampler_views[2], NULL);
   calc_src_and_dst(&s->layers[layer], rgba->texture->width0, rgba->texture->height0,
                    src_rect ? *src_rect : default_rect(&s->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&s->layers[layer]));

   if (colors)
      for (i = 0; i < 4; ++i)
         s->layers[layer].colors[i] = colors[i];
}

void
vl_compositor_set_layer_rotation(struct vl_compositor_state *s,
                                 unsigned layer,
                                 enum vl_compositor_rotation rotate)
{
   assert(s);
   assert(layer < VL_COMPOSITOR_MAX_LAYERS);
   s->layers[layer].rotate = rotate;
}

void
vl_compositor_set_layer_mirror(struct vl_compositor_state *s,
                               unsigned layer,
                               enum vl_compositor_mirror mirror)
{
   assert(s);
   assert(layer < VL_COMPOSITOR_MAX_LAYERS);
   s->layers[layer].mirror = mirror;
}

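/* Deinterlace a complete video buffer into another one, plane by plane.
 * If the source has no chroma (single component), the destination chroma
 * planes are cleared to 0.5 instead. */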
void
vl_compositor_yuv_deint_full(struct vl_compositor_state *s,
                             struct vl_compositor *c,
                             struct pipe_video_buffer *src,
                             struct pipe_video_buffer *dst,
                             struct u_rect *src_rect,
                             struct u_rect *dst_rect,
                             enum vl_compositor_deinterlace deinterlace)
{
   struct pipe_surface **dst_surfaces;

   dst_surfaces = dst->get_surfaces(dst);

   set_yuv_layer(s, c, 0, src, src_rect, NULL, VL_COMPOSITOR_PLANE_Y, deinterlace);
   vl_compositor_set_layer_dst_area(s, 0, dst_rect);
   vl_compositor_render(s, c, dst_surfaces[0], NULL, false);

   if (dst_surfaces[1]) {
      bool clear = util_format_get_nr_components(src->buffer_format) == 1;
      union pipe_color_union clear_color = { .f = {0.5, 0.5} };
      dst_rect->x0 = util_format_get_plane_width(dst->buffer_format, 1, dst_rect->x0);
      dst_rect->x1 = util_format_get_plane_width(dst->buffer_format, 1, dst_rect->x1);
      dst_rect->y0 = util_format_get_plane_height(dst->buffer_format, 1, dst_rect->y0);
      dst_rect->y1 = util_format_get_plane_height(dst->buffer_format, 1, dst_rect->y1);
      set_yuv_layer(s, c, 0, src, src_rect, NULL, dst_surfaces[2] ? VL_COMPOSITOR_PLANE_U :
                    VL_COMPOSITOR_PLANE_UV, deinterlace);
      vl_compositor_set_layer_dst_area(s, 0, dst_rect);
      if (clear) {
         struct u_rect clear_rect = *dst_rect;
         s->used_layers = 0;
         vl_compositor_set_clear_color(s, &clear_color);
         vl_compositor_render(s, c, dst_surfaces[1], &clear_rect, true);
      } else {
         vl_compositor_render(s, c, dst_surfaces[1], NULL, false);
      }

      if (dst_surfaces[2]) {
         set_yuv_layer(s, c, 0, src, src_rect, NULL, VL_COMPOSITOR_PLANE_V, deinterlace);
         vl_compositor_set_layer_dst_area(s, 0, dst_rect);
         if (clear) {
            struct u_rect clear_rect = *dst_rect;
            s->used_layers = 0;
            vl_compositor_set_clear_color(s, &clear_color);
            vl_compositor_render(s, c, dst_surfaces[2], &clear_rect, true);
         } else {
            vl_compositor_render(s, c, dst_surfaces[2], NULL, false);
         }
      }
   }
}

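/* Convert an RGB resource into the planes of a YUV video buffer, rendering
 * once per destination plane. */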
void
vl_compositor_convert_rgb_to_yuv(struct vl_compositor_state *s,
                                 struct vl_compositor *c,
                                 unsigned layer,
                                 struct pipe_resource *src_res,
                                 struct pipe_video_buffer *dst,
                                 struct u_rect *src_rect,
                                 struct u_rect *dst_rect)
{
   struct pipe_sampler_view *sv, sv_templ;
   struct pipe_surface **dst_surfaces;

   dst_surfaces = dst->get_surfaces(dst);

   memset(&sv_templ, 0, sizeof(sv_templ));
   u_sampler_view_default_template(&sv_templ, src_res, src_res->format);
   sv = s->pipe->create_sampler_view(s->pipe, src_res, &sv_templ);

   set_rgb_to_yuv_layer(s, c, 0, sv, src_rect, NULL, VL_COMPOSITOR_PLANE_Y);
   vl_compositor_set_layer_dst_area(s, 0, dst_rect);
   vl_compositor_render(s, c, dst_surfaces[0], NULL, false);

   if (dst_surfaces[1]) {
      dst_rect->x0 = util_format_get_plane_width(dst->buffer_format, 1, dst_rect->x0);
      dst_rect->x1 = util_format_get_plane_width(dst->buffer_format, 1, dst_rect->x1);
      dst_rect->y0 = util_format_get_plane_height(dst->buffer_format, 1, dst_rect->y0);
      dst_rect->y1 = util_format_get_plane_height(dst->buffer_format, 1, dst_rect->y1);
      set_rgb_to_yuv_layer(s, c, 0, sv, src_rect, NULL, dst_surfaces[2] ? VL_COMPOSITOR_PLANE_U :
                           VL_COMPOSITOR_PLANE_UV);
      vl_compositor_set_layer_dst_area(s, 0, dst_rect);
      vl_compositor_render(s, c, dst_surfaces[1], NULL, false);

      if (dst_surfaces[2]) {
         set_rgb_to_yuv_layer(s, c, 0, sv, src_rect, NULL, VL_COMPOSITOR_PLANE_V);
         vl_compositor_set_layer_dst_area(s, 0, dst_rect);
         vl_compositor_render(s, c, dst_surfaces[2], NULL, false);
      }
   }

   pipe_sampler_view_reference(&sv, NULL);
}

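/* Render all bound layers to the destination surface, using the compute
 * path if the first layer has a compute shader and the graphics path if it
 * has a fragment shader. */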
void
vl_compositor_render(struct vl_compositor_state *s,
                     struct vl_compositor *c,
                     struct pipe_surface *dst_surface,
                     struct u_rect *dirty_area,
                     bool clear_dirty)
{
   assert(s);

   if (s->layers->cs)
      vl_compositor_cs_render(s, c, dst_surface, dirty_area, clear_dirty);
   else if (s->layers->fs)
      vl_compositor_gfx_render(s, c, dst_surface, dirty_area, clear_dirty);
   else
      debug_warning("Hardware doesn't support compositing.\n");
}

bool
vl_compositor_init(struct vl_compositor *c, struct pipe_context *pipe, bool compute_only)
{
   assert(c);

   memset(c, 0, sizeof(*c));

   c->pipe_cs_composit_supported = compute_only || pipe->screen->caps.prefer_compute_for_multimedia;
   c->pipe_gfx_supported = !compute_only && pipe->screen->caps.graphics;
   c->pipe = pipe;

   c->deinterlace = VL_COMPOSITOR_NONE;

   if (!init_pipe_state(c)) {
      return false;
   }

   if (!init_buffers(c)) {
      cleanup_shaders(c);
      cleanup_pipe_state(c);
      return false;
   }

   return true;
}

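/* Initialize a compositor state object: default clear color, empty layers,
 * the constant buffer that holds the CSC matrix and shader parameters, and
 * an identity CSC matrix as the default. */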
bool
vl_compositor_init_state(struct vl_compositor_state *s, struct pipe_context *pipe)
{
   vl_csc_matrix csc_matrix;

   assert(s);

   memset(s, 0, sizeof(*s));

   s->pipe = pipe;

   s->clear_color.f[0] = s->clear_color.f[1] = 0.0f;
   s->clear_color.f[2] = s->clear_color.f[3] = 0.0f;

   /*
    * Create our fragment shader's constant buffer
    * Const buffer contains the color conversion matrix and bias vectors
    */
   /* XXX: Create with IMMUTABLE/STATIC... although it does change every once in a long while... */
   s->shader_params = pipe_buffer_create_const0
   (
      pipe->screen,
      PIPE_BIND_CONSTANT_BUFFER,
      PIPE_USAGE_DEFAULT,
      sizeof(csc_matrix) + 32 * sizeof(float) + 2 * sizeof(int)
   );

   if (!s->shader_params)
      return false;

   vl_compositor_clear_layers(s);

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_IDENTITY, NULL, true, &csc_matrix);
   if (!vl_compositor_set_csc_matrix(s, (const vl_csc_matrix *)&csc_matrix, 1.0f, 0.0f))
      return false;

   return true;
}

void
vl_compositor_cleanup_state(struct vl_compositor_state *s)
{
   assert(s);

   vl_compositor_clear_layers(s);
   pipe_resource_reference(&s->shader_params, NULL);
}
