/* Copyright 2022 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "background.h"
#include "common.h"
#include "vpe_priv.h"
#include "color_bg.h"

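/* Builds one background VPE command per gap rectangle.  Each command takes
 * stream 0 as its input with a minimal source viewport and 1:1 scaling
 * ratios, and uses the gap itself as the destination viewport so the gap
 * is covered by the background fill. */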
void vpe_create_bg_segments(
    struct vpe_priv *vpe_priv, struct vpe_rect *gaps, uint16_t gaps_cnt, enum vpe_cmd_ops ops)
{
    uint16_t gap_index;
    struct scaler_data *scaler_data;
    struct stream_ctx *stream_ctx = &(vpe_priv->stream_ctx[0]);
    int32_t vp_x = stream_ctx->stream.scaling_info.src_rect.x;
    int32_t vp_y = stream_ctx->stream.scaling_info.src_rect.y;
    uint16_t src_div = vpe_is_yuv420(stream_ctx->stream.surface_info.format) ? 2 : 1;
    uint16_t dst_div = vpe_is_yuv420(vpe_priv->output_ctx.surface.format) ? 2 : 1;

    for (gap_index = 0; gap_index < gaps_cnt; gap_index++) {

        scaler_data = &(vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].inputs[0].scaler_data);

        /* format */
        scaler_data->format = stream_ctx->stream.surface_info.format;
        scaler_data->lb_params.alpha_en = stream_ctx->per_pixel_alpha;

        /* recout */
        scaler_data->recout.x = 0;
        scaler_data->recout.y = 0;
        scaler_data->recout.height = VPE_MIN_VIEWPORT_SIZE;
        scaler_data->recout.width = VPE_MIN_VIEWPORT_SIZE;

        /* ratios */
        scaler_data->ratios.horz = vpe_fixpt_one;
        scaler_data->ratios.vert = vpe_fixpt_one;

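        /* 4:2:0 chroma planes carry half the samples in each direction, so
         * the chroma ratios are half of the luma ratios */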
        if (vpe_is_yuv420(scaler_data->format)) {
            scaler_data->ratios.horz_c = vpe_fixpt_from_fraction(1, 2);
            scaler_data->ratios.vert_c = vpe_fixpt_from_fraction(1, 2);
        } else {
            scaler_data->ratios.horz_c = vpe_fixpt_one;
            scaler_data->ratios.vert_c = vpe_fixpt_one;
        }

        /* active region */
        scaler_data->h_active = gaps[gap_index].width;
        scaler_data->v_active = gaps[gap_index].height;

        /* viewport */
        scaler_data->viewport.x = vp_x;
        scaler_data->viewport.y = vp_y;
        scaler_data->viewport.width = VPE_MIN_VIEWPORT_SIZE;
        scaler_data->viewport.height = VPE_MIN_VIEWPORT_SIZE;

        scaler_data->viewport_c.x = scaler_data->viewport.x / src_div;
        scaler_data->viewport_c.y = scaler_data->viewport.y / src_div;
        scaler_data->viewport_c.width = scaler_data->viewport.width / src_div;
        scaler_data->viewport_c.height = scaler_data->viewport.height / src_div;

        /* destination viewport */
        scaler_data->dst_viewport = gaps[gap_index];

        scaler_data->dst_viewport_c.x = scaler_data->dst_viewport.x / dst_div;
        scaler_data->dst_viewport_c.y = scaler_data->dst_viewport.y / dst_div;
        scaler_data->dst_viewport_c.width = scaler_data->dst_viewport.width / dst_div;
        scaler_data->dst_viewport_c.height = scaler_data->dst_viewport.height / dst_div;

        /* taps and inits */
        scaler_data->taps.h_taps = scaler_data->taps.v_taps = 4;
        scaler_data->taps.h_taps_c = scaler_data->taps.v_taps_c = 2;

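        /* initial scaler phase for each plane: init = (ratio + taps + 1) / 2 */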
        scaler_data->inits.h = vpe_fixpt_div_int(
            vpe_fixpt_add_int(scaler_data->ratios.horz, (int)(scaler_data->taps.h_taps + 1)), 2);
        scaler_data->inits.v = vpe_fixpt_div_int(
            vpe_fixpt_add_int(scaler_data->ratios.vert, (int)(scaler_data->taps.v_taps + 1)), 2);
        scaler_data->inits.h_c = vpe_fixpt_div_int(
            vpe_fixpt_add_int(scaler_data->ratios.horz_c, (int)(scaler_data->taps.h_taps_c + 1)),
            2);
        scaler_data->inits.v_c = vpe_fixpt_div_int(
            vpe_fixpt_add_int(scaler_data->ratios.vert_c, (int)(scaler_data->taps.v_taps_c + 1)),
            2);

        VPE_ASSERT(gaps_cnt - gap_index - 1 <= (uint16_t)0xF);

        // background takes stream_idx 0 as its input
        vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].inputs[0].stream_idx = 0;
        vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].dst_viewport = scaler_data->dst_viewport;
        vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].dst_viewport_c = scaler_data->dst_viewport_c;
        vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].num_inputs = 1;
        vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].ops = ops;
        vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].cd = (uint8_t)(gaps_cnt - gap_index - 1);
        vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].tm_enabled =
            false; // currently only front-end tone mapping is supported

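        /* cd counts down the remaining background commands: the first gap
         * (cd == gaps_cnt - 1) begins the background sequence and the last
         * gap (cd == 0) ends it */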
        if (vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].cd == (gaps_cnt - 1)) {
            vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].is_begin = true;
        }

        if (vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].cd == 0) {
            vpe_priv->vpe_cmd_info[vpe_priv->num_vpe_cmds].is_end = true;
        }

        vpe_priv->num_vpe_cmds++;
    }
}

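/* Splits target_rect evenly into max_gaps background gaps; the remainder of
 * the division is distributed one extra pixel to each of the last
 * (width % max_gaps) gaps so the gaps exactly cover target_rect. */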
void vpe_full_bg_gaps(struct vpe_rect *gaps, const struct vpe_rect *target_rect, uint16_t max_gaps)
{
    uint16_t gap_index;
    int32_t last_covered;
    uint32_t gap_width, gap_remainder;

    last_covered = target_rect->x;
    gap_width = target_rect->width / max_gaps;
    gap_remainder = target_rect->width % max_gaps;

    for (gap_index = 0; gap_index < max_gaps; gap_index++) {
        gaps[gap_index].x = last_covered;
        gaps[gap_index].y = target_rect->y;
        gaps[gap_index].width = gap_width;
        if (gap_index >= max_gaps - gap_remainder) {
            gaps[gap_index].width += 1;
        }
        gaps[gap_index].height = target_rect->height;
        last_covered = last_covered + (int32_t)gaps[gap_index].width;
    }
}

/* Calculates the gaps in target_rect that are not covered by the first stream
 * and returns the number of gaps.  Falls back to covering the whole
 * target_rect with max_gaps evenly split gaps (vpe_full_bg_gaps) when the
 * uncovered area cannot be handled within the max_seg_width and max_gaps
 * limits. */
uint16_t vpe_find_bg_gaps(struct vpe_priv *vpe_priv, const struct vpe_rect *target_rect,
    struct vpe_rect *gaps, uint16_t max_gaps)
{
    uint16_t num_gaps = 0;
    uint16_t num_segs;
    struct vpe_rect *dst_viewport_rect;
    const uint32_t max_seg_width = vpe_priv->pub.caps->plane_caps.max_viewport_width;
    const uint16_t num_multiple = 1;

    num_segs = vpe_priv->stream_ctx[0].num_segments;
    dst_viewport_rect = &(vpe_priv->stream_ctx[0].segment_ctx[0].scaler_data.dst_viewport);

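    /* gap on the left: target_rect starts before the first segment's dst_viewport */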
    if (target_rect->x < dst_viewport_rect->x) {

        if (target_rect->width <= max_seg_width) {
            goto full_bg;
        }
        gaps[0].x = target_rect->x;
        gaps[0].y = target_rect->y;
        gaps[0].width = (uint32_t)(dst_viewport_rect->x - target_rect->x);
        gaps[0].height = target_rect->height;
        num_gaps++;
        if (gaps[0].width > max_seg_width) {
            if (!vpe_priv->resource.split_bg_gap(
                    gaps, target_rect, max_seg_width, max_gaps, &num_gaps, num_multiple)) {
                goto full_bg;
            }
        }
    }
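    /* gap on the right: target_rect extends past the last segment's dst_viewport */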
    dst_viewport_rect =
        &(vpe_priv->stream_ctx[0].segment_ctx[num_segs - 1].scaler_data.dst_viewport);

    if (target_rect->x + (int32_t)target_rect->width >
        dst_viewport_rect->x + (int32_t)dst_viewport_rect->width) {

        if (num_gaps == max_gaps) {
            goto full_bg;
        }

        gaps[num_gaps].x = dst_viewport_rect->x + (int32_t)dst_viewport_rect->width;
        gaps[num_gaps].y = target_rect->y;
        gaps[num_gaps].width =
            (uint32_t)(target_rect->x + (int32_t)target_rect->width -
                       (dst_viewport_rect->x + (int32_t)dst_viewport_rect->width));
        gaps[num_gaps].height = target_rect->height;
        num_gaps++;
        if (gaps[num_gaps - 1].width > max_seg_width) {
            if (!vpe_priv->resource.split_bg_gap(
                    gaps, target_rect, max_seg_width, max_gaps, &num_gaps, num_multiple)) {
                goto full_bg;
            }
        }
    }
    return num_gaps;

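/* fallback: cover the whole target_rect with max_gaps evenly split background segments */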
full_bg:
    vpe_full_bg_gaps(gaps, target_rect, max_gaps);
    return max_gaps;
}