/* Copyright 2022 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */
#include "vpe_assert.h"
#include "common.h"
#include "vpe_priv.h"
#include "vpe10_command.h"
#include "vpe10_cmd_builder.h"

/***** Internal helpers *****/
static void get_np_and_subop(struct vpe_priv *vpe_priv, struct vpe_cmd_info *cmd_info,
    struct plane_desc_header *header);

static enum VPE_PLANE_CFG_ELEMENT_SIZE vpe_get_element_size(
    enum vpe_surface_pixel_format format, int plane_idx);

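/* Hook up the VPE 1.0 command builder callbacks. */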
void vpe10_construct_cmd_builder(struct vpe_priv *vpe_priv, struct cmd_builder *builder)
{
    builder->build_noops = vpe10_build_noops;
    builder->build_vpe_cmd = vpe10_build_vpe_cmd;
    builder->build_plane_descriptor = vpe10_build_plane_descriptor;
}

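/* Fill num_dwords NOP command headers into *ppbuf and advance the caller's
 * write pointer past the emitted dwords.
 */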
enum vpe_status vpe10_build_noops(struct vpe_priv *vpe_priv, uint32_t **ppbuf, uint32_t num_dwords)
{
    uint32_t i;
    uint32_t *buffer = *ppbuf;
    uint32_t noop = VPE_CMD_HEADER(VPE_CMD_OPCODE_NOP, 0);

    for (i = 0; i < num_dwords; i++)
        *buffer++ = noop;

    *ppbuf = buffer;

    return VPE_STATUS_OK;
}

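/* Build one VPE descriptor for the command at cmd_idx: emit its plane
 * descriptor, then the frontend configs for each input pipe (reusing the
 * previously generated configs where possible), then the backend configs
 * for the output.
 */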
enum vpe_status vpe10_build_vpe_cmd(
    struct vpe_priv *vpe_priv, struct vpe_build_bufs *cur_bufs, uint32_t cmd_idx)
{
    struct cmd_builder *builder = &vpe_priv->resource.cmd_builder;
    struct vpe_desc_writer *vpe_desc_writer = &vpe_priv->vpe_desc_writer;
    struct vpe_buf *emb_buf = &cur_bufs->emb_buf;
    struct output_ctx *output_ctx;
    struct pipe_ctx *pipe_ctx = NULL;
    uint32_t pipe_idx, config_idx;
    struct vpe_vector *config_vector;
    struct config_record *config;
    struct vpe_cmd_info *cmd_info = vpe_vector_get(vpe_priv->vpe_cmd_vector, cmd_idx);
    VPE_ASSERT(cmd_info);

    vpe_desc_writer->init(vpe_desc_writer, &cur_bufs->cmd_buf, cmd_info->cd);

    // plane descriptor
    builder->build_plane_descriptor(vpe_priv, emb_buf, cmd_idx);

    vpe_desc_writer->add_plane_desc(
        vpe_desc_writer, vpe_priv->plane_desc_writer.base_gpu_va, (uint8_t)emb_buf->tmz);

    // reclaim any pipe whose owning stream is no longer present
    vpe_pipe_reclaim(vpe_priv, cmd_info);

    config_writer_init(&vpe_priv->config_writer, emb_buf);

    // frontend programming
    for (pipe_idx = 0; pipe_idx < cmd_info->num_inputs; pipe_idx++) {
        bool reuse;
        struct stream_ctx *stream_ctx;
        enum vpe_cmd_type cmd_type = VPE_CMD_TYPE_COUNT;

        // keep using the same pipe whenever possible so the previously
        // programmed register configs can be reused
        pipe_ctx = vpe_pipe_find_owner(vpe_priv, cmd_info->inputs[pipe_idx].stream_idx, &reuse);
        VPE_ASSERT(pipe_ctx);

        if (!reuse) {
            vpe_priv->resource.program_frontend(
                vpe_priv, pipe_ctx->pipe_idx, cmd_idx, pipe_idx, false);
        } else {
            if (vpe_priv->init.debug.disable_reuse_bit)
                reuse = false;

            stream_ctx = &vpe_priv->stream_ctx[cmd_info->inputs[pipe_idx].stream_idx];

            // map the command ops to the command type used for stream-op config sharing
            if (cmd_info->ops == VPE_CMD_OPS_BG)
                cmd_type = VPE_CMD_TYPE_BG;
            else if (cmd_info->ops == VPE_CMD_OPS_COMPOSITING)
                cmd_type = VPE_CMD_TYPE_COMPOSITING;
            else if (cmd_info->ops == VPE_CMD_OPS_BG_VSCF_INPUT)
                cmd_type = VPE_CMD_TYPE_BG_VSCF_INPUT;
            else if (cmd_info->ops == VPE_CMD_OPS_BG_VSCF_OUTPUT)
                cmd_type = VPE_CMD_TYPE_BG_VSCF_OUTPUT;
            else {
                VPE_ASSERT(0);
                return VPE_STATUS_ERROR;
            }

            // follow the same order of config generation as in the non-reuse case
            // stream sharing
            config_vector = stream_ctx->configs[pipe_idx];
            VPE_ASSERT(config_vector->num_elements);
            for (config_idx = 0; config_idx < config_vector->num_elements; config_idx++) {
                config = (struct config_record *)vpe_vector_get(config_vector, config_idx);

                vpe_desc_writer->add_config_desc(
                    vpe_desc_writer, config->config_base_addr, reuse, (uint8_t)emb_buf->tmz);
            }

            // stream-op sharing
            config_vector = stream_ctx->stream_op_configs[pipe_idx][cmd_type];
            for (config_idx = 0; config_idx < config_vector->num_elements; config_idx++) {
                config = (struct config_record *)vpe_vector_get(config_vector, config_idx);

                vpe_desc_writer->add_config_desc(
                    vpe_desc_writer, config->config_base_addr, reuse, (uint8_t)emb_buf->tmz);
            }

            // command specific
            vpe_priv->resource.program_frontend(
                vpe_priv, pipe_ctx->pipe_idx, cmd_idx, pipe_idx, true);
        }
    }

    VPE_ASSERT(pipe_ctx);

    // bail out if the config writer failed (e.g. due to embedded buffer overflow)
    if (vpe_priv->config_writer.status != VPE_STATUS_OK) {
        return vpe_priv->config_writer.status;
    }

    // backend programming
    output_ctx = &vpe_priv->output_ctx;

    config_vector = output_ctx->configs[0];
    if (!config_vector->num_elements) {
        vpe_priv->resource.program_backend(vpe_priv, pipe_ctx->pipe_idx, cmd_idx, false);
    } else {
        bool reuse = !vpe_priv->init.debug.disable_reuse_bit;

        // re-use output register configs
        for (config_idx = 0; config_idx < config_vector->num_elements; config_idx++) {
            config = (struct config_record *)vpe_vector_get(config_vector, config_idx);

            vpe_desc_writer->add_config_desc(
                vpe_desc_writer, config->config_base_addr, reuse, (uint8_t)emb_buf->tmz);
        }

        vpe_priv->resource.program_backend(vpe_priv, pipe_ctx->pipe_idx, cmd_idx, true);
    }

    /* bail out if the descriptor writer failed (e.g. due to command buffer overflow) */
    if (vpe_desc_writer->status != VPE_STATUS_OK) {
        return vpe_desc_writer->status;
    }

    vpe_desc_writer->complete(vpe_desc_writer);

    return VPE_STATUS_OK;
}

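/* Build the plane descriptor for the command at cmd_idx into the embedded
 * buffer: one or two source planes (luma plus chroma for dual-plane formats)
 * for the single input stream, followed by the destination plane.
 */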
enum vpe_status vpe10_build_plane_descriptor(
    struct vpe_priv *vpe_priv, struct vpe_buf *buf, uint32_t cmd_idx)
{
    struct stream_ctx *stream_ctx;
    struct vpe_surface_info *surface_info;
    int32_t stream_idx;
    PHYSICAL_ADDRESS_LOC *addrloc;
    struct plane_desc_src src;
    struct plane_desc_dst dst;
    struct plane_desc_header header = {0};
    struct cmd_builder *builder = &vpe_priv->resource.cmd_builder;
    struct plane_desc_writer *plane_desc_writer = &vpe_priv->plane_desc_writer;
    struct vpe_cmd_info *cmd_info = vpe_vector_get(vpe_priv->vpe_cmd_vector, cmd_idx);
    VPE_ASSERT(cmd_info);

    VPE_ASSERT(cmd_info->num_inputs == 1);

    // obtain the number of planes and sub-opcode for the source/destination streams
    get_np_and_subop(vpe_priv, cmd_info, &header);

    plane_desc_writer->init(&vpe_priv->plane_desc_writer, buf, &header);
    stream_idx = cmd_info->inputs[0].stream_idx;
    stream_ctx = &vpe_priv->stream_ctx[stream_idx];
    surface_info = &stream_ctx->stream.surface_info;

    src.tmz = surface_info->address.tmz_surface;
    src.swizzle = surface_info->swizzle;
    src.rotation = stream_ctx->stream.rotation;

    if (surface_info->address.type == VPE_PLN_ADDR_TYPE_VIDEO_PROGRESSIVE) {
        addrloc = &surface_info->address.video_progressive.luma_addr;

        src.base_addr_lo = addrloc->u.low_part;
        src.base_addr_hi = (uint32_t)addrloc->u.high_part;
        src.pitch = (uint16_t)surface_info->plane_size.surface_pitch;
        src.viewport_x = (uint16_t)cmd_info->inputs[0].scaler_data.viewport.x;
        src.viewport_y = (uint16_t)cmd_info->inputs[0].scaler_data.viewport.y;
        src.viewport_w = (uint16_t)cmd_info->inputs[0].scaler_data.viewport.width;
        src.viewport_h = (uint16_t)cmd_info->inputs[0].scaler_data.viewport.height;
        src.elem_size = (uint8_t)(vpe_get_element_size(surface_info->format, 0));

        plane_desc_writer->add_source(&vpe_priv->plane_desc_writer, &src, true);

        if (vpe_is_dual_plane_format(surface_info->format)) {
            addrloc = &surface_info->address.video_progressive.chroma_addr;

            src.base_addr_lo = addrloc->u.low_part;
            src.base_addr_hi = (uint32_t)addrloc->u.high_part;
            src.pitch = (uint16_t)surface_info->plane_size.chroma_pitch;
            src.viewport_x = (uint16_t)cmd_info->inputs[0].scaler_data.viewport_c.x;
            src.viewport_y = (uint16_t)cmd_info->inputs[0].scaler_data.viewport_c.y;
            src.viewport_w = (uint16_t)cmd_info->inputs[0].scaler_data.viewport_c.width;
            src.viewport_h = (uint16_t)cmd_info->inputs[0].scaler_data.viewport_c.height;
            src.elem_size = (uint8_t)(vpe_get_element_size(surface_info->format, 1));

            plane_desc_writer->add_source(&vpe_priv->plane_desc_writer, &src, false);
        }
    } else {
        addrloc = &surface_info->address.grph.addr;

        src.base_addr_lo = addrloc->u.low_part;
        src.base_addr_hi = (uint32_t)addrloc->u.high_part;
        src.pitch = (uint16_t)surface_info->plane_size.surface_pitch;
        src.viewport_x = (uint16_t)cmd_info->inputs[0].scaler_data.viewport.x;
        src.viewport_y = (uint16_t)cmd_info->inputs[0].scaler_data.viewport.y;
        src.viewport_w = (uint16_t)cmd_info->inputs[0].scaler_data.viewport.width;
        src.viewport_h = (uint16_t)cmd_info->inputs[0].scaler_data.viewport.height;
        src.elem_size = (uint8_t)(vpe_get_element_size(surface_info->format, 0));

        plane_desc_writer->add_source(&vpe_priv->plane_desc_writer, &src, true);
    }

    surface_info = &vpe_priv->output_ctx.surface;

    VPE_ASSERT(surface_info->address.type == VPE_PLN_ADDR_TYPE_GRAPHICS);

    addrloc = &surface_info->address.grph.addr;

    dst.tmz = surface_info->address.tmz_surface;
    dst.swizzle = surface_info->swizzle;

    if (stream_ctx->flip_horizonal_output)
        dst.mirror = VPE_MIRROR_HORIZONTAL;
    else
        dst.mirror = VPE_MIRROR_NONE;

    dst.base_addr_lo = addrloc->u.low_part;
    dst.base_addr_hi = (uint32_t)addrloc->u.high_part;
    dst.pitch = (uint16_t)surface_info->plane_size.surface_pitch;
    dst.viewport_x = (uint16_t)cmd_info->outputs[0].dst_viewport.x;
    dst.viewport_y = (uint16_t)cmd_info->outputs[0].dst_viewport.y;
    dst.viewport_w = (uint16_t)cmd_info->outputs[0].dst_viewport.width;
    dst.viewport_h = (uint16_t)cmd_info->outputs[0].dst_viewport.height;
    dst.elem_size = (uint8_t)(vpe_get_element_size(surface_info->format, 0));

    plane_desc_writer->add_destination(&vpe_priv->plane_desc_writer, &dst, true);

    return vpe_priv->plane_desc_writer.status;
}

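/* Fill the plane descriptor header with the sub-opcode and the number of
 * planes used by the source and destination streams (two planes for
 * dual-plane formats, one plane otherwise).
 */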
static void get_np_and_subop(struct vpe_priv *vpe_priv, struct vpe_cmd_info *cmd_info,
    struct plane_desc_header *header)
{
    header->npd1 = 0;

    header->subop = VPE_PLANE_CFG_SUBOP_1_TO_1;

    if (cmd_info->num_inputs == 1) {
        header->nps1 = 0;
        if (vpe_is_dual_plane_format(
                vpe_priv->stream_ctx[cmd_info->inputs[0].stream_idx].stream.surface_info.format))
            header->nps0 = VPE_PLANE_CFG_TWO_PLANES;
        else
            header->nps0 = VPE_PLANE_CFG_ONE_PLANE;
    } else {
        header->nps0 = 0;
        header->nps1 = 0;
        header->npd0 = 0;
        return;
    }

    if (vpe_is_dual_plane_format(vpe_priv->output_ctx.surface.format))
        header->npd0 = 1;
    else
        header->npd0 = 0;
}

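/* Map a surface pixel format and plane index to the element size used in the
 * plane config: 8/16 bpe for the luma/chroma planes of NV12-class formats,
 * 16/32 bpe for P010-class formats, 64 bpe for 16-bit-per-channel RGB(A)
 * formats, and 32 bpe otherwise.
 */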
static enum VPE_PLANE_CFG_ELEMENT_SIZE vpe_get_element_size(
    enum vpe_surface_pixel_format format, int plane_idx)
{
    switch (format) {
    // nv12/21
    case VPE_SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
    case VPE_SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
        if (plane_idx == 0)
            return VPE_PLANE_CFG_ELEMENT_SIZE_8BPE;
        else
            return VPE_PLANE_CFG_ELEMENT_SIZE_16BPE;
    // P010
    case VPE_SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
    case VPE_SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
        if (plane_idx == 0)
            return VPE_PLANE_CFG_ELEMENT_SIZE_16BPE;
        else
            return VPE_PLANE_CFG_ELEMENT_SIZE_32BPE;
    // 64bpp
    case VPE_SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
    case VPE_SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
    case VPE_SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
    case VPE_SURFACE_PIXEL_FORMAT_GRPH_RGBA16161616F:
    case VPE_SURFACE_PIXEL_FORMAT_GRPH_BGRA16161616F:
        return VPE_PLANE_CFG_ELEMENT_SIZE_64BPE;
    default:
        break;
    }
    return VPE_PLANE_CFG_ELEMENT_SIZE_32BPE;
}