/**************************************************************************
 *
 * Copyright 2009 Younes Manton.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include <assert.h>

#include "pipe/p_context.h"

#include "util/u_sampler.h"
#include "util/u_draw.h"

#include "tgsi/tgsi_ureg.h"

#include "vl_defines.h"
#include "vl_vertex_buffers.h"
#include "vl_mc.h"
#include "vl_idct.h"
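/* Vertex shader output indices. The position and the first generic output
 * share index 0 because they use different TGSI semantics. The reference
 * shaders use the generic slots for the top/bottom field texture
 * coordinates, while the YCbCr shaders reuse the same slots for the
 * per-block flags and the texture coordinates.
 */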
enum VS_OUTPUT
{
   VS_O_VPOS = 0,
   VS_O_VTOP = 0,
   VS_O_VBOTTOM,

   VS_O_FLAGS = VS_O_VTOP,
   VS_O_VTEX = VS_O_VBOTTOM
};
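/* Emit vertex shader code that scales a macroblock vertex into the target:
 * (vpos + vrect) * block_scale is written to o_vpos.xy, o_vpos.zw is set to
 * 1.0, and the scaled position is also returned in a temporary so the caller
 * can reuse it.
 */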
static struct ureg_dst
calc_position(struct vl_mc *r, struct ureg_program *shader, struct ureg_src block_scale)
{
   struct ureg_src vrect, vpos;
   struct ureg_dst t_vpos;
   struct ureg_dst o_vpos;

   vrect = ureg_DECL_vs_input(shader, VS_I_RECT);
   vpos = ureg_DECL_vs_input(shader, VS_I_VPOS);

   t_vpos = ureg_DECL_temporary(shader);

   o_vpos = ureg_DECL_output(shader, TGSI_SEMANTIC_POSITION, VS_O_VPOS);

   /*
    * block_scale = (VL_MACROBLOCK_WIDTH, VL_MACROBLOCK_HEIGHT) / (dst.width, dst.height)
    *
    * t_vpos = (vpos + vrect) * block_scale
    * o_vpos.xy = t_vpos
    * o_vpos.zw = 1.0
    */
   ureg_ADD(shader, ureg_writemask(t_vpos, TGSI_WRITEMASK_XY), vpos, vrect);
   ureg_MUL(shader, ureg_writemask(t_vpos, TGSI_WRITEMASK_XY), ureg_src(t_vpos), block_scale);
   ureg_MOV(shader, ureg_writemask(o_vpos, TGSI_WRITEMASK_XY), ureg_src(t_vpos));
   ureg_MOV(shader, ureg_writemask(o_vpos, TGSI_WRITEMASK_ZW), ureg_imm1f(shader, 1.0f));

   return t_vpos;
}
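/* Emit fragment shader code that derives the field parity of the current
 * line from the window position: tmp.y becomes 1.0 on odd lines and 0.0 on
 * even lines.
 */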
static struct ureg_dst
calc_line(struct pipe_screen *screen, struct ureg_program *shader)
{
   struct ureg_dst tmp;
   struct ureg_src pos;

   tmp = ureg_DECL_temporary(shader);

   if (screen->get_param(screen, PIPE_CAP_TGSI_FS_POSITION_IS_SYSVAL))
      pos = ureg_DECL_system_value(shader, TGSI_SEMANTIC_POSITION, 0);
   else
      pos = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_POSITION, VS_O_VPOS,
                               TGSI_INTERPOLATE_LINEAR);

   /*
    * tmp.y = fraction(pos.y / 2) >= 0.5 ? 1 : 0
    */
   ureg_MUL(shader, ureg_writemask(tmp, TGSI_WRITEMASK_Y), pos, ureg_imm1f(shader, 0.5f));
   ureg_FRC(shader, ureg_writemask(tmp, TGSI_WRITEMASK_Y), ureg_src(tmp));
   ureg_SGE(shader, ureg_writemask(tmp, TGSI_WRITEMASK_Y), ureg_src(tmp), ureg_imm1f(shader, 0.5f));

   return tmp;
}
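/* Build the vertex shader for inter-frame prediction: it computes the block
 * position via calc_position() and applies the scaled top and bottom field
 * motion vectors, emitting one texture coordinate pair per field.
 */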
static void *
create_ref_vert_shader(struct vl_mc *r)
{
   struct ureg_program *shader;
   struct ureg_src mv_scale;
   struct ureg_src vmv[2];
   struct ureg_dst t_vpos;
   struct ureg_dst o_vmv[2];
   unsigned i;

   shader = ureg_create(PIPE_SHADER_VERTEX);
   if (!shader)
      return NULL;

   vmv[0] = ureg_DECL_vs_input(shader, VS_I_MV_TOP);
   vmv[1] = ureg_DECL_vs_input(shader, VS_I_MV_BOTTOM);

   t_vpos = calc_position(r, shader, ureg_imm2f(shader,
      (float)VL_MACROBLOCK_WIDTH / r->buffer_width,
      (float)VL_MACROBLOCK_HEIGHT / r->buffer_height)
   );

   o_vmv[0] = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTOP);
   o_vmv[1] = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_VBOTTOM);

   /*
    * mv_scale.xy = 0.5 / (dst.width, dst.height);
    * mv_scale.z = 1.0f / 4.0f
    * mv_scale.w = 1.0f / 255.0f
    *
    * // Apply motion vectors
    * o_vmv[0..1].xy = vmv[0..1] * mv_scale + t_vpos
    * o_vmv[0..1].zw = vmv[0..1] * mv_scale
    *
    */

   mv_scale = ureg_imm4f(shader,
      0.5f / r->buffer_width,
      0.5f / r->buffer_height,
      1.0f / 4.0f,
      1.0f / PIPE_VIDEO_MV_WEIGHT_MAX);

   for (i = 0; i < 2; ++i) {
      ureg_MAD(shader, ureg_writemask(o_vmv[i], TGSI_WRITEMASK_XY), mv_scale, vmv[i], ureg_src(t_vpos));
      ureg_MUL(shader, ureg_writemask(o_vmv[i], TGSI_WRITEMASK_ZW), mv_scale, vmv[i]);
   }

   ureg_release_temporary(shader, t_vpos);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, r->pipe);
}
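/* Build the fragment shader for inter-frame prediction: it picks the top or
 * bottom field texture coordinates based on the current line, adjusts the
 * y coordinate for field-based references, and fetches the prediction from
 * the reference sampler.
 */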
static void *
create_ref_frag_shader(struct vl_mc *r)
{
   const float y_scale =
      r->buffer_height / 2 *
      r->macroblock_size / VL_MACROBLOCK_HEIGHT;

   struct ureg_program *shader;
   struct ureg_src tc[2], sampler;
   struct ureg_dst ref, field;
   struct ureg_dst fragment;
   unsigned label;

   shader = ureg_create(PIPE_SHADER_FRAGMENT);
   if (!shader)
      return NULL;

   tc[0] = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VTOP, TGSI_INTERPOLATE_LINEAR);
   tc[1] = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_VBOTTOM, TGSI_INTERPOLATE_LINEAR);

   sampler = ureg_DECL_sampler(shader, 0);
   ref = ureg_DECL_temporary(shader);

   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   field = calc_line(r->pipe->screen, shader);
   /*
    * ref = field.y ? tc[1] : tc[0]
    *
    * // Adjust tc according to top/bottom field selection
    * if (|ref.z|) {
    *    ref.y *= y_scale
    *    ref.y = floor(ref.y)
    *    ref.y += ref.z
    *    ref.y /= y_scale
    * }
    * fragment.xyz = tex(ref, sampler[0])
    */
   ureg_CMP(shader, ureg_writemask(ref, TGSI_WRITEMASK_XYZ),
            ureg_negate(ureg_scalar(ureg_src(field), TGSI_SWIZZLE_Y)),
            tc[1], tc[0]);
   ureg_CMP(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W),
            ureg_negate(ureg_scalar(ureg_src(field), TGSI_SWIZZLE_Y)),
            tc[1], tc[0]);

   ureg_IF(shader, ureg_scalar(ureg_src(ref), TGSI_SWIZZLE_Z), &label);

      ureg_MUL(shader, ureg_writemask(ref, TGSI_WRITEMASK_Y),
               ureg_src(ref), ureg_imm1f(shader, y_scale));
      ureg_FLR(shader, ureg_writemask(ref, TGSI_WRITEMASK_Y), ureg_src(ref));
      ureg_ADD(shader, ureg_writemask(ref, TGSI_WRITEMASK_Y),
               ureg_src(ref), ureg_scalar(ureg_src(ref), TGSI_SWIZZLE_Z));
      ureg_MUL(shader, ureg_writemask(ref, TGSI_WRITEMASK_Y),
               ureg_src(ref), ureg_imm1f(shader, 1.0f / y_scale));

   ureg_fixup_label(shader, label, ureg_get_instruction_number(shader));
   ureg_ENDIF(shader);

   ureg_TEX(shader, ureg_writemask(fragment, TGSI_WRITEMASK_XYZ), TGSI_TEXTURE_2D, ureg_src(ref), sampler);

   ureg_release_temporary(shader, ref);

   ureg_release_temporary(shader, field);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, r->pipe);
}
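/* Build the vertex shader for rendering the YCbCr blocks. The caller-supplied
 * vs_callback fills in the texture coordinate outputs; this function adds the
 * position calculation, the per-block flags (intra flag scaled by 0.5) and,
 * for frame-sized macroblocks, the interlaced field offset handling.
 */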
static void *
create_ycbcr_vert_shader(struct vl_mc *r, vl_mc_ycbcr_vert_shader vs_callback, void *callback_priv)
{
   struct ureg_program *shader;

   struct ureg_src vrect, vpos;
   struct ureg_dst t_vpos, t_vtex;
   struct ureg_dst o_vpos, o_flags;

   struct vertex2f scale = {
      (float)VL_BLOCK_WIDTH / r->buffer_width * VL_MACROBLOCK_WIDTH / r->macroblock_size,
      (float)VL_BLOCK_HEIGHT / r->buffer_height * VL_MACROBLOCK_HEIGHT / r->macroblock_size
   };

   unsigned label;

   shader = ureg_create(PIPE_SHADER_VERTEX);
   if (!shader)
      return NULL;

   vrect = ureg_DECL_vs_input(shader, VS_I_RECT);
   vpos = ureg_DECL_vs_input(shader, VS_I_VPOS);

   t_vpos = calc_position(r, shader, ureg_imm2f(shader, scale.x, scale.y));
   t_vtex = ureg_DECL_temporary(shader);

   o_vpos = ureg_DECL_output(shader, TGSI_SEMANTIC_POSITION, VS_O_VPOS);
   o_flags = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, VS_O_FLAGS);
   /*
    * o_vtex.xy = t_vpos
    * o_flags.z = intra * 0.5
    *
    * if(interlaced) {
    *    t_vtex.xy = vrect.y ? { 0, scale.y } : { -scale.y, 0 }
    *    t_vtex.z = vpos.y % 2
    *    t_vtex.y = t_vtex.z ? t_vtex.x : t_vtex.y
    *    o_vpos.y = t_vtex.y + t_vpos.y
    *
    *    o_flags.w = t_vtex.z ? 0 : 1
    * }
    *
    */
   vs_callback(callback_priv, r, shader, VS_O_VTEX, t_vpos);

   ureg_MUL(shader, ureg_writemask(o_flags, TGSI_WRITEMASK_Z),
            ureg_scalar(vpos, TGSI_SWIZZLE_Z), ureg_imm1f(shader, 0.5f));
   ureg_MOV(shader, ureg_writemask(o_flags, TGSI_WRITEMASK_W), ureg_imm1f(shader, -1.0f));

   if (r->macroblock_size == VL_MACROBLOCK_HEIGHT) { //TODO
      ureg_IF(shader, ureg_scalar(vpos, TGSI_SWIZZLE_W), &label);

         ureg_CMP(shader, ureg_writemask(t_vtex, TGSI_WRITEMASK_XY),
                  ureg_negate(ureg_scalar(vrect, TGSI_SWIZZLE_Y)),
                  ureg_imm2f(shader, 0.0f, scale.y),
                  ureg_imm2f(shader, -scale.y, 0.0f));
         ureg_MUL(shader, ureg_writemask(t_vtex, TGSI_WRITEMASK_Z),
                  ureg_scalar(vpos, TGSI_SWIZZLE_Y), ureg_imm1f(shader, 0.5f));

         ureg_FRC(shader, ureg_writemask(t_vtex, TGSI_WRITEMASK_Z), ureg_src(t_vtex));

         ureg_CMP(shader, ureg_writemask(t_vtex, TGSI_WRITEMASK_Y),
                  ureg_negate(ureg_scalar(ureg_src(t_vtex), TGSI_SWIZZLE_Z)),
                  ureg_scalar(ureg_src(t_vtex), TGSI_SWIZZLE_X),
                  ureg_scalar(ureg_src(t_vtex), TGSI_SWIZZLE_Y));
         ureg_ADD(shader, ureg_writemask(o_vpos, TGSI_WRITEMASK_Y),
                  ureg_src(t_vpos), ureg_src(t_vtex));

         ureg_CMP(shader, ureg_writemask(o_flags, TGSI_WRITEMASK_W),
                  ureg_negate(ureg_scalar(ureg_src(t_vtex), TGSI_SWIZZLE_Z)),
                  ureg_imm1f(shader, 0.0f), ureg_imm1f(shader, 1.0f));

      ureg_fixup_label(shader, label, ureg_get_instruction_number(shader));
      ureg_ENDIF(shader);
   }

   ureg_release_temporary(shader, t_vtex);
   ureg_release_temporary(shader, t_vpos);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, r->pipe);
}
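/* Build the fragment shader for rendering the YCbCr blocks. Pixels belonging
 * to the wrong field are killed; for the rest the caller-supplied fs_callback
 * fetches the block data, which is then scaled, biased by the flags and
 * optionally negated (invert selects the variant used for the subtractive
 * pass).
 */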
static void *
create_ycbcr_frag_shader(struct vl_mc *r, float scale, bool invert,
                         vl_mc_ycbcr_frag_shader fs_callback, void *callback_priv)
{
   struct ureg_program *shader;
   struct ureg_src flags;
   struct ureg_dst tmp;
   struct ureg_dst fragment;
   unsigned label;

   shader = ureg_create(PIPE_SHADER_FRAGMENT);
   if (!shader)
      return NULL;

   flags = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, VS_O_FLAGS, TGSI_INTERPOLATE_LINEAR);

   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   tmp = calc_line(r->pipe->screen, shader);

   /*
    * if (field == tc.w)
    *    kill();
    * else {
    *    fragment.xyz = tex(tc, sampler) * scale + tc.z
    *    fragment.w = 1.0f
    * }
    */

   ureg_SEQ(shader, ureg_writemask(tmp, TGSI_WRITEMASK_Y),
            ureg_scalar(flags, TGSI_SWIZZLE_W), ureg_src(tmp));

   ureg_IF(shader, ureg_scalar(ureg_src(tmp), TGSI_SWIZZLE_Y), &label);

      ureg_KILL(shader);

   ureg_fixup_label(shader, label, ureg_get_instruction_number(shader));
   ureg_ELSE(shader, &label);

      fs_callback(callback_priv, r, shader, VS_O_VTEX, tmp);

      if (scale != 1.0f)
         ureg_MAD(shader, ureg_writemask(tmp, TGSI_WRITEMASK_XYZ),
                  ureg_src(tmp), ureg_imm1f(shader, scale),
                  ureg_scalar(flags, TGSI_SWIZZLE_Z));
      else
         ureg_ADD(shader, ureg_writemask(tmp, TGSI_WRITEMASK_XYZ),
                  ureg_src(tmp), ureg_scalar(flags, TGSI_SWIZZLE_Z));

      ureg_MUL(shader, ureg_writemask(fragment, TGSI_WRITEMASK_XYZ), ureg_src(tmp), ureg_imm1f(shader, invert ? -1.0f : 1.0f));
      ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));

   ureg_fixup_label(shader, label, ureg_get_instruction_number(shader));
   ureg_ENDIF(shader);

   ureg_release_temporary(shader, tmp);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, r->pipe);
}
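/* Create the CSO objects shared by all buffers: the sampler state for the
 * reference textures, one clear/add/sub blend state per colormask value, and
 * the common rasterizer state. On failure everything created so far is
 * destroyed again.
 */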
static bool
init_pipe_state(struct vl_mc *r)
{
   struct pipe_sampler_state sampler;
   struct pipe_blend_state blend;
   struct pipe_rasterizer_state rs_state;
   unsigned i;

   assert(r);

   memset(&sampler, 0, sizeof(sampler));
   sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_BORDER;
   sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.compare_mode = PIPE_TEX_COMPARE_NONE;
   sampler.compare_func = PIPE_FUNC_ALWAYS;
   sampler.normalized_coords = 1;
   r->sampler_ref = r->pipe->create_sampler_state(r->pipe, &sampler);
   if (!r->sampler_ref)
      goto error_sampler_ref;

   for (i = 0; i < VL_MC_NUM_BLENDERS; ++i) {
      memset(&blend, 0, sizeof blend);
      blend.independent_blend_enable = 0;
      blend.rt[0].blend_enable = 1;
      blend.rt[0].rgb_func = PIPE_BLEND_ADD;
      blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
      blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_ZERO;
      blend.rt[0].alpha_func = PIPE_BLEND_ADD;
      blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
      blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ZERO;
      blend.logicop_enable = 0;
      blend.logicop_func = PIPE_LOGICOP_CLEAR;
      blend.rt[0].colormask = i;
      blend.dither = 0;
      r->blend_clear[i] = r->pipe->create_blend_state(r->pipe, &blend);
      if (!r->blend_clear[i])
         goto error_blend;

      blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_ONE;
      blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
      r->blend_add[i] = r->pipe->create_blend_state(r->pipe, &blend);
      if (!r->blend_add[i])
         goto error_blend;

      blend.rt[0].rgb_func = PIPE_BLEND_REVERSE_SUBTRACT;
      blend.rt[0].alpha_func = PIPE_BLEND_REVERSE_SUBTRACT;
      r->blend_sub[i] = r->pipe->create_blend_state(r->pipe, &blend);
      if (!r->blend_sub[i])
         goto error_blend;
   }

   memset(&rs_state, 0, sizeof(rs_state));
   /*rs_state.sprite_coord_enable */
   rs_state.sprite_coord_mode = PIPE_SPRITE_COORD_UPPER_LEFT;
   rs_state.point_quad_rasterization = true;
   rs_state.point_size = VL_BLOCK_WIDTH;
   rs_state.half_pixel_center = true;
   rs_state.bottom_edge_rule = true;
   rs_state.depth_clip_near = 1;
   rs_state.depth_clip_far = 1;

   r->rs_state = r->pipe->create_rasterizer_state(r->pipe, &rs_state);
   if (!r->rs_state)
      goto error_rs_state;

   return true;

error_rs_state:
error_blend:
   for (i = 0; i < VL_MC_NUM_BLENDERS; ++i) {
      if (r->blend_sub[i])
         r->pipe->delete_blend_state(r->pipe, r->blend_sub[i]);

      if (r->blend_add[i])
         r->pipe->delete_blend_state(r->pipe, r->blend_add[i]);

      if (r->blend_clear[i])
         r->pipe->delete_blend_state(r->pipe, r->blend_clear[i]);
   }

   r->pipe->delete_sampler_state(r->pipe, r->sampler_ref);

error_sampler_ref:
   return false;
}

static void
cleanup_pipe_state(struct vl_mc *r)
{
   unsigned i;

   assert(r);

   r->pipe->delete_sampler_state(r->pipe, r->sampler_ref);
   for (i = 0; i < VL_MC_NUM_BLENDERS; ++i) {
      r->pipe->delete_blend_state(r->pipe, r->blend_clear[i]);
      r->pipe->delete_blend_state(r->pipe, r->blend_add[i]);
      r->pipe->delete_blend_state(r->pipe, r->blend_sub[i]);
   }
   r->pipe->delete_rasterizer_state(r->pipe, r->rs_state);
}
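/* Initialize a motion compensation renderer for the given target and
 * macroblock size. The vs/fs callbacks hook caller-provided source fetch code
 * into the YCbCr shaders. Returns false and cleans up after itself on
 * failure.
 */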
bool
vl_mc_init(struct vl_mc *renderer, struct pipe_context *pipe,
           unsigned buffer_width, unsigned buffer_height,
           unsigned macroblock_size, float scale,
           vl_mc_ycbcr_vert_shader vs_callback,
           vl_mc_ycbcr_frag_shader fs_callback,
           void *callback_priv)
{
   assert(renderer);
   assert(pipe);

   memset(renderer, 0, sizeof(struct vl_mc));

   renderer->pipe = pipe;
   renderer->buffer_width = buffer_width;
   renderer->buffer_height = buffer_height;
   renderer->macroblock_size = macroblock_size;

   if (!init_pipe_state(renderer))
      goto error_pipe_state;

   renderer->vs_ref = create_ref_vert_shader(renderer);
   if (!renderer->vs_ref)
      goto error_vs_ref;

   renderer->vs_ycbcr = create_ycbcr_vert_shader(renderer, vs_callback, callback_priv);
   if (!renderer->vs_ycbcr)
      goto error_vs_ycbcr;

   renderer->fs_ref = create_ref_frag_shader(renderer);
   if (!renderer->fs_ref)
      goto error_fs_ref;

   renderer->fs_ycbcr = create_ycbcr_frag_shader(renderer, scale, false, fs_callback, callback_priv);
   if (!renderer->fs_ycbcr)
      goto error_fs_ycbcr;

   renderer->fs_ycbcr_sub = create_ycbcr_frag_shader(renderer, scale, true, fs_callback, callback_priv);
   if (!renderer->fs_ycbcr_sub)
      goto error_fs_ycbcr_sub;

   return true;

error_fs_ycbcr_sub:
   renderer->pipe->delete_fs_state(renderer->pipe, renderer->fs_ycbcr);

error_fs_ycbcr:
   renderer->pipe->delete_fs_state(renderer->pipe, renderer->fs_ref);

error_fs_ref:
   renderer->pipe->delete_vs_state(renderer->pipe, renderer->vs_ycbcr);

error_vs_ycbcr:
   renderer->pipe->delete_vs_state(renderer->pipe, renderer->vs_ref);

error_vs_ref:
   cleanup_pipe_state(renderer);

error_pipe_state:
   return false;
}

void
vl_mc_cleanup(struct vl_mc *renderer)
{
   assert(renderer);

   cleanup_pipe_state(renderer);

   renderer->pipe->delete_vs_state(renderer->pipe, renderer->vs_ref);
   renderer->pipe->delete_vs_state(renderer->pipe, renderer->vs_ycbcr);
   renderer->pipe->delete_fs_state(renderer->pipe, renderer->fs_ref);
   renderer->pipe->delete_fs_state(renderer->pipe, renderer->fs_ycbcr);
   renderer->pipe->delete_fs_state(renderer->pipe, renderer->fs_ycbcr_sub);
}
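/* Initialize the per-buffer state: the constant parts of the viewport and
 * framebuffer state. The surface-dependent parts are filled in later by
 * vl_mc_set_surface().
 */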
bool
vl_mc_init_buffer(struct vl_mc *renderer, struct vl_mc_buffer *buffer)
{
   assert(renderer && buffer);

   buffer->viewport.scale[2] = 1;
   buffer->viewport.translate[0] = 0;
   buffer->viewport.translate[1] = 0;
   buffer->viewport.translate[2] = 0;

   buffer->fb_state.nr_cbufs = 1;
   buffer->fb_state.zsbuf = NULL;

   return true;
}

void
vl_mc_cleanup_buffer(struct vl_mc_buffer *buffer)
{
   assert(buffer);
}
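/* Attach a render target to the buffer and size the viewport and framebuffer
 * state accordingly; also mark the surface as not yet written so the next
 * rendering pass uses the clearing blend state.
 */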
void
vl_mc_set_surface(struct vl_mc_buffer *buffer, struct pipe_surface *surface)
{
   assert(buffer && surface);

   buffer->surface_cleared = false;

   buffer->viewport.scale[0] = surface->width;
   buffer->viewport.scale[1] = surface->height;

   buffer->fb_state.width = surface->width;
   buffer->fb_state.height = surface->height;
   buffer->fb_state.cbufs[0] = surface;
}
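/* Bind the state common to both rendering paths: the rasterizer, the clearing
 * or additive blend state for the given writemask (depending on whether the
 * surface has already been written), the framebuffer and the viewport.
 */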
static void
prepare_pipe_4_rendering(struct vl_mc *renderer, struct vl_mc_buffer *buffer, unsigned mask)
{
   assert(buffer);

   renderer->pipe->bind_rasterizer_state(renderer->pipe, renderer->rs_state);

   if (buffer->surface_cleared)
      renderer->pipe->bind_blend_state(renderer->pipe, renderer->blend_add[mask]);
   else
      renderer->pipe->bind_blend_state(renderer->pipe, renderer->blend_clear[mask]);

   renderer->pipe->set_framebuffer_state(renderer->pipe, &buffer->fb_state);
   renderer->pipe->set_viewport_states(renderer->pipe, 0, 1, &buffer->viewport);
}
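/* Render the inter-frame prediction: draw one quad per macroblock sampling
 * from the given reference picture, writing only the RGB channels, and mark
 * the surface as cleared.
 */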
void
vl_mc_render_ref(struct vl_mc *renderer, struct vl_mc_buffer *buffer, struct pipe_sampler_view *ref)
{
   assert(buffer && ref);

   prepare_pipe_4_rendering(renderer, buffer, PIPE_MASK_R | PIPE_MASK_G | PIPE_MASK_B);

   renderer->pipe->bind_vs_state(renderer->pipe, renderer->vs_ref);
   renderer->pipe->bind_fs_state(renderer->pipe, renderer->fs_ref);

   renderer->pipe->set_sampler_views(renderer->pipe, PIPE_SHADER_FRAGMENT,
                                     0, 1, &ref);
   renderer->pipe->bind_sampler_states(renderer->pipe, PIPE_SHADER_FRAGMENT,
                                       0, 1, &renderer->sampler_ref);

   util_draw_arrays_instanced(renderer->pipe, PIPE_PRIM_QUADS, 0, 4, 0,
                              renderer->buffer_width / VL_MACROBLOCK_WIDTH *
                              renderer->buffer_height / VL_MACROBLOCK_HEIGHT);

   buffer->surface_cleared = true;
}
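/* Render the YCbCr blocks for one component: draw num_instances quads with
 * the YCbCr shaders, writing only that component's channel. If a prediction
 * has already been rendered, a second pass with the subtractive blend state
 * and the inverting shader variant accounts for negative block values.
 */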
void
vl_mc_render_ycbcr(struct vl_mc *renderer, struct vl_mc_buffer *buffer, unsigned component, unsigned num_instances)
{
   unsigned mask = 1 << component;

   assert(buffer);

   if (num_instances == 0)
      return;

   prepare_pipe_4_rendering(renderer, buffer, mask);

   renderer->pipe->bind_vs_state(renderer->pipe, renderer->vs_ycbcr);
   renderer->pipe->bind_fs_state(renderer->pipe, renderer->fs_ycbcr);

   util_draw_arrays_instanced(renderer->pipe, PIPE_PRIM_QUADS, 0, 4, 0, num_instances);

   if (buffer->surface_cleared) {
      renderer->pipe->bind_blend_state(renderer->pipe, renderer->blend_sub[mask]);
      renderer->pipe->bind_fs_state(renderer->pipe, renderer->fs_ycbcr_sub);
      util_draw_arrays_instanced(renderer->pipe, PIPE_PRIM_QUADS, 0, 4, 0, num_instances);
   }
}