• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* SPDX-License-Identifier: GPL-2.0-only */
2 /*
3  * Copyright 2022 Collabora Ltd.
4  */
5 
6 #include "trace.h"
7 #include <math.h>
8 
9 struct trace_context ctx_trace = {};
10 
/*
 * Return true when @path begins with "/dev/video" or "/dev/media",
 * i.e. it names a V4L2 video or media controller device node.
 */
bool is_video_or_media_device(const char *path)
{
	static const char prefix_video[] = "/dev/video";
	static const char prefix_media[] = "/dev/media";

	if (strncmp(path, prefix_video, sizeof(prefix_video) - 1) == 0)
		return true;
	return strncmp(path, prefix_media, sizeof(prefix_media) - 1) == 0;
}
19 
add_device(int fd,std::string path)20 void add_device(int fd, std::string path)
21 {
22 	debug_line_info("\n\tfd: %d, path: %s", fd, path.c_str());
23 	std::pair<int, std::string> new_pair = std::make_pair(fd, path);
24 	ctx_trace.devices.insert(new_pair);
25 }
26 
get_device(int fd)27 std::string get_device(int fd)
28 {
29 	std::string path;
30 	auto it = ctx_trace.devices.find(fd);
31 	if (it != ctx_trace.devices.end())
32 		path = it->second;
33 	return path;
34 }
35 
print_devices(void)36 void print_devices(void)
37 {
38 	if (!is_debug())
39 		return;
40 	if (ctx_trace.devices.size())
41 		fprintf(stderr, "Devices:\n");
42 	for (auto &device_pair : ctx_trace.devices)
43 		fprintf(stderr, "fd: %d, path: %s\n", device_pair.first, device_pair.second.c_str());
44 }
45 
print_decode_order(void)46 void print_decode_order(void)
47 {
48 	if (!is_debug())
49 		return;
50 	fprintf(stderr, "Decode order: ");
51 	for (auto &num : ctx_trace.decode_order)
52 		fprintf(stderr, "%ld, ",  num);
53 	fprintf(stderr, ".\n");
54 }
55 
set_decode_order(long decode_order)56 void set_decode_order(long decode_order)
57 {
58 	debug_line_info("\n\t%ld", decode_order);
59 
60 	auto it = find(ctx_trace.decode_order.begin(), ctx_trace.decode_order.end(), decode_order);
61 	if (it == ctx_trace.decode_order.end())
62 		ctx_trace.decode_order.push_front(decode_order);
63 
64 	print_decode_order();
65 }
66 
get_decode_order(void)67 long get_decode_order(void)
68 {
69 	long decode_order = 0;
70 	if (!ctx_trace.decode_order.empty())
71 		decode_order = ctx_trace.decode_order.front();
72 	return decode_order;
73 }
74 
add_buffer_trace(int fd,__u32 type,__u32 index,__u32 offset=0)75 void add_buffer_trace(int fd, __u32 type, __u32 index, __u32 offset = 0)
76 {
77 	struct buffer_trace buf = {};
78 	buf.fd = fd;
79 	buf.type = type;
80 	buf.index = index;
81 	buf.offset = offset;
82 	buf.display_order = -1;
83 	ctx_trace.buffers.push_front(buf);
84 }
85 
remove_buffer_trace(__u32 type,__u32 index)86 void remove_buffer_trace(__u32 type, __u32 index)
87 {
88 	for (auto it = ctx_trace.buffers.begin(); it != ctx_trace.buffers.end(); ++it) {
89 		if ((it->type == type) && (it->index == index)) {
90 			ctx_trace.buffers.erase(it);
91 			break;
92 		}
93 	}
94 }
95 
buffer_in_trace_context(int fd,__u32 offset)96 bool buffer_in_trace_context(int fd, __u32 offset)
97 {
98 	bool buffer_in_trace_context = false;
99 	for (auto &b : ctx_trace.buffers) {
100 		if ((b.fd == fd) && (b.offset == offset)) {
101 			buffer_in_trace_context = true;
102 			break;
103 		}
104 	}
105 	return buffer_in_trace_context;
106 }
107 
get_buffer_fd_trace(__u32 type,__u32 index)108 int get_buffer_fd_trace(__u32 type, __u32 index)
109 {
110 	int fd = 0;
111 	for (auto &b : ctx_trace.buffers) {
112 		if ((b.type == type) && (b.index == index)) {
113 			fd = b.fd;
114 			break;
115 		}
116 	}
117 	return fd;
118 }
119 
get_buffer_type_trace(int fd,__u32 offset)120 __u32 get_buffer_type_trace(int fd, __u32 offset)
121 {
122 	__u32 type = 0;
123 	for (auto &b : ctx_trace.buffers) {
124 		if ((b.fd == fd) && (b.offset == offset)) {
125 			type = b.type;
126 			break;
127 		}
128 	}
129 	return type;
130 }
131 
get_buffer_index_trace(int fd,__u32 offset)132 int get_buffer_index_trace(int fd, __u32 offset)
133 {
134 	int index = -1;
135 	for (auto &b : ctx_trace.buffers) {
136 		if ((b.fd == fd) && (b.offset == offset)) {
137 			index = b.index;
138 			break;
139 		}
140 	}
141 	return index;
142 }
143 
get_buffer_offset_trace(__u32 type,__u32 index)144 __u32 get_buffer_offset_trace(__u32 type, __u32 index)
145 {
146 	__u32 offset = 0;
147 	for (auto &b : ctx_trace.buffers) {
148 		if ((b.type == type) && (b.index == index)) {
149 			offset = b.offset;
150 			break;
151 		}
152 	}
153 	return offset;
154 }
155 
set_buffer_bytesused_trace(int fd,__u32 offset,__u32 bytesused)156 void set_buffer_bytesused_trace(int fd, __u32 offset, __u32 bytesused)
157 {
158 	for (auto &b : ctx_trace.buffers) {
159 		if ((b.fd == fd) && (b.offset == offset)) {
160 			b.bytesused = bytesused;
161 			break;
162 		}
163 	}
164 }
165 
get_buffer_bytesused_trace(int fd,__u32 offset)166 long get_buffer_bytesused_trace(int fd, __u32 offset)
167 {
168 	long bytesused = 0;
169 	for (auto &b : ctx_trace.buffers) {
170 		if ((b.fd == fd) && (b.offset == offset)) {
171 			bytesused = b.bytesused;
172 			break;
173 		}
174 	}
175 	return bytesused;
176 }
177 
set_buffer_display_order(int fd,__u32 offset,long display_order)178 void set_buffer_display_order(int fd, __u32 offset, long display_order)
179 {
180 	debug_line_info("\n\t%ld", display_order);
181 	for (auto &b : ctx_trace.buffers) {
182 		if ((b.fd == fd) && (b.offset == offset)) {
183 			b.display_order = display_order;
184 			break;
185 		}
186 	}
187 }
188 
set_buffer_address_trace(int fd,__u32 offset,unsigned long address)189 void set_buffer_address_trace(int fd, __u32 offset, unsigned long address)
190 {
191 	for (auto &b : ctx_trace.buffers) {
192 		if ((b.fd == fd) && (b.offset == offset)) {
193 			b.address = address;
194 			break;
195 		}
196 	}
197 }
198 
get_buffer_address_trace(int fd,__u32 offset)199 unsigned long get_buffer_address_trace(int fd, __u32 offset)
200 {
201 	unsigned long address = 0;
202 	for (auto &b : ctx_trace.buffers) {
203 		if ((b.fd == fd) && (b.offset == offset)) {
204 			address = b.address;
205 			break;
206 		}
207 	}
208 	return address;
209 }
210 
buffer_is_mapped(unsigned long buffer_address)211 bool buffer_is_mapped(unsigned long buffer_address)
212 {
213 	bool ret = false;
214 	for (auto &b : ctx_trace.buffers) {
215 		if (b.address == buffer_address) {
216 			ret = true;
217 			break;
218 		}
219 	}
220 	return ret;
221 }
222 
print_buffers_trace(void)223 void print_buffers_trace(void)
224 {
225 	if (!is_debug())
226 		return;
227 	for (auto &b : ctx_trace.buffers) {
228 		fprintf(stderr, "fd: %d, %s, index: %d, display_order: %ld, bytesused: %d, ",
229 		        b.fd, val2s(b.type, v4l2_buf_type_val_def).c_str(), b.index, b.display_order, b.bytesused);
230 		fprintf(stderr, "address: %lu, offset: %u \n",  b.address, b.offset);
231 	}
232 }
233 
get_expected_length_trace()234 unsigned get_expected_length_trace()
235 {
236 	/*
237 	 * TODO: this assumes that the stride is equal to the real width and that the
238 	 * padding follows the end of the chroma plane. It could be improved by
239 	 * following the model in v4l2-ctl-streaming.cpp read_write_padded_frame()
240 	 */
241 	unsigned expected_length = ctx_trace.width * ctx_trace.height;
242 	if (ctx_trace.pixelformat == V4L2_PIX_FMT_NV12 || ctx_trace.pixelformat == V4L2_PIX_FMT_YUV420) {
243 		expected_length *= 3;
244 		expected_length /= 2;
245 		expected_length += (expected_length % 2);
246 	}
247 	return expected_length;
248 }
249 
s_ext_ctrls_setup(struct v4l2_ext_controls * ext_controls)250 void s_ext_ctrls_setup(struct v4l2_ext_controls *ext_controls)
251 {
252 	if (ext_controls->which != V4L2_CTRL_WHICH_REQUEST_VAL)
253 		return;
254 
255 	debug_line_info();
256 	/*
257 	 * Since userspace sends H264 frames out of order, get information
258 	 * about the correct display order of each frame so that v4l2-tracer
259 	 * can write the decoded frames to a file.
260 	 */
261 	for (__u32 i = 0; i < ext_controls->count; i++) {
262 		struct v4l2_ext_control ctrl = ext_controls->controls[i];
263 
264 		switch (ctrl.id) {
265 		case V4L2_CID_STATELESS_H264_SPS: {
266 			ctx_trace.fmt.h264.max_pic_order_cnt_lsb = pow(2, ctrl.p_h264_sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
267 			break;
268 		}
269 		case V4L2_CID_STATELESS_H264_DECODE_PARAMS: {
270 			long pic_order_cnt_msb;
271 			int max = ctx_trace.fmt.h264.max_pic_order_cnt_lsb;
272 			long prev_pic_order_cnt_msb = get_decode_order();
273 			int prev_pic_order_cnt_lsb = ctx_trace.fmt.h264.pic_order_cnt_lsb;
274 			int pic_order_cnt_lsb = ctrl.p_h264_decode_params->pic_order_cnt_lsb;
275 
276 			if (is_debug()) {
277 				line_info();
278 				fprintf(stderr, "\tprev_pic_order_cnt_lsb: %d\n", prev_pic_order_cnt_lsb);
279 				fprintf(stderr, "\tprev_pic_order_cnt_msb: %ld\n", prev_pic_order_cnt_msb);
280 				fprintf(stderr, "\tpic_order_cnt_lsb: %d\n", pic_order_cnt_lsb);
281 			}
282 
283 			/*
284 			 * TODO: improve the displaying of decoded frames following H264 specification
285 			 * 8.2.1.1. For now, dump all the previously decoded frames when an IDR_PIC is
286 			 * received to avoid losing frames although this will still sometimes result
287 			 * in frames out of order.
288 			 */
289 			if ((ctrl.p_h264_decode_params->flags & V4L2_H264_DECODE_PARAM_FLAG_IDR_PIC) != 0U)
290 				trace_mem_decoded();
291 
292 			/*
293 			 * When pic_order_cnt_lsb wraps around to zero, adjust the total count using
294 			 * max to keep the correct display order.
295 			 */
296 			if ((pic_order_cnt_lsb < prev_pic_order_cnt_lsb) &&
297 				((prev_pic_order_cnt_lsb - pic_order_cnt_lsb) >= (max / 2))) {
298 				pic_order_cnt_msb = prev_pic_order_cnt_msb + max;
299 			} else if ((pic_order_cnt_lsb > prev_pic_order_cnt_lsb) &&
300 				((pic_order_cnt_lsb - prev_pic_order_cnt_lsb) > (max / 2))) {
301 				pic_order_cnt_msb = prev_pic_order_cnt_msb - max;
302 			} else {
303 				pic_order_cnt_msb = prev_pic_order_cnt_msb + (pic_order_cnt_lsb - prev_pic_order_cnt_lsb);
304 			}
305 
306 			debug_line_info("\n\tpic_order_cnt_msb: %ld", pic_order_cnt_msb);
307 			ctx_trace.fmt.h264.pic_order_cnt_lsb = pic_order_cnt_lsb;
308 			set_decode_order(pic_order_cnt_msb);
309 			break;
310 		}
311 		default:
312 			break;
313 		}
314 	}
315 }
316 
/*
 * Bookkeeping run when a buffer is queued with VIDIOC_QBUF.
 *
 * For output (compressed) buffers, record bytesused and trace the encoded
 * payload before the driver consumes it. For capture (decoded) buffers,
 * trace any previously decoded data before the buffer is reused, and record
 * the frame's display order.
 */
void qbuf_setup(struct v4l2_buffer *buf)
{
	debug_line_info("\n\t%s, index: %d", val2s(buf->type, v4l2_buf_type_val_def).c_str(), buf->index);

	/* Resolve the traced fd/offset for this buffer from its (type, index). */
	int buf_fd = get_buffer_fd_trace(buf->type, buf->index);
	__u32 buf_offset = get_buffer_offset_trace(buf->type, buf->index);

	/* bytesused lives in a different field for multiplanar vs. single-planar. */
	__u32 bytesused = 0;
	if (buf->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		bytesused = buf->m.planes[0].bytesused;
	else if (buf->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
		bytesused = buf->bytesused;
	if (buf->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE ||
	    buf->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
		set_buffer_bytesused_trace(buf_fd, buf_offset, bytesused);

	/* The output buffer should have compressed data just before it is queued, so trace it. */
	if (buf->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE ||
	    buf->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
		trace_mem_encoded(buf_fd, buf_offset);
	}

	if (buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ||
	    buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		/*
		 * If the capture buffer is queued for reuse, trace it before it is reused.
		 * Capture buffers can't be traced using dqbuf because the buffer is mmapped
		 * after the call to dqbuf.
		 */
		trace_mem_decoded();

		/* H264 sets display order in controls, otherwise display just in the order queued. */
		if (ctx_trace.compression_format != V4L2_PIX_FMT_H264_SLICE)
			set_decode_order(get_decode_order() + 1);

		set_buffer_display_order(buf_fd, buf_offset, get_decode_order());
		print_decode_order();
		print_buffers_trace();
	}
}
357 
dqbuf_setup(struct v4l2_buffer * buf)358 void dqbuf_setup(struct v4l2_buffer *buf)
359 {
360 	debug_line_info("\n\t%s, index: %d", val2s(buf->type, v4l2_buf_type_val_def).c_str(), buf->index);
361 
362 	int buf_fd = get_buffer_fd_trace(buf->type, buf->index);
363 	__u32 buf_offset = get_buffer_offset_trace(buf->type, buf->index);
364 
365 	__u32 bytesused = 0;
366 	if (buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
367 		bytesused = buf->m.planes[0].bytesused;
368 	else if (buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
369 		bytesused = buf->bytesused;
370 
371 	if (buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ||
372 	    buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
373 		set_buffer_bytesused_trace(buf_fd, buf_offset, bytesused);
374 }
375 
/*
 * Bookkeeping run on VIDIOC_STREAMOFF: optionally report the negotiated
 * formats, then flush any decoded capture data that was never traced.
 */
void streamoff_cleanup(v4l2_buf_type buf_type)
{
	debug_line_info();
	if (is_verbose() || (getenv("V4L2_TRACER_OPTION_WRITE_DECODED_TO_YUV_FILE") != nullptr)) {
		fprintf(stderr, "VIDIOC_STREAMOFF: %s\n", val2s(buf_type, v4l2_buf_type_val_def).c_str());
		fprintf(stderr, "%s, %s %s, width: %d, height: %d\n",
		        val2s(ctx_trace.compression_format, v4l2_pix_fmt_val_def).c_str(),
		        val2s(ctx_trace.pixelformat, v4l2_pix_fmt_val_def).c_str(),
		        fcc2s(ctx_trace.pixelformat).c_str(), ctx_trace.width, ctx_trace.height);
	}

	/*
	 * Before turning off the stream, trace any remaining capture buffers that were missed
	 * because they were not queued for reuse.
	 */
	if (buf_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE ||
	    buf_type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
		trace_mem_decoded();
}
395 
g_fmt_setup_trace(struct v4l2_format * format)396 void g_fmt_setup_trace(struct v4l2_format *format)
397 {
398 	if (format->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
399 		ctx_trace.width = format->fmt.pix.width;
400 		ctx_trace.height = format->fmt.pix.height;
401 		ctx_trace.pixelformat = format->fmt.pix.pixelformat;
402 	}
403 	if (format->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
404 		ctx_trace.width = format->fmt.pix_mp.width;
405 		ctx_trace.height = format->fmt.pix_mp.height;
406 		ctx_trace.pixelformat = format->fmt.pix_mp.pixelformat;
407 	}
408 	if (format->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
409 		ctx_trace.compression_format = format->fmt.pix.pixelformat;
410 	if (format->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
411 		ctx_trace.compression_format = format->fmt.pix_mp.pixelformat;
412 }
413 
s_fmt_setup(struct v4l2_format * format)414 void s_fmt_setup(struct v4l2_format *format)
415 {
416 	if (format->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
417 		ctx_trace.compression_format = format->fmt.pix.pixelformat;
418 	if (format->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
419 		ctx_trace.compression_format = format->fmt.pix_mp.pixelformat;
420 }
421 
expbuf_setup(struct v4l2_exportbuffer * export_buffer)422 void expbuf_setup(struct v4l2_exportbuffer *export_buffer)
423 {
424 	__u32 type = export_buffer->type;
425 	__u32 index = export_buffer->index;
426 	int fd_found_in_trace_context = get_buffer_fd_trace(type, index);
427 
428 	/* If the buffer was already added to the trace context don't add it again. */
429 	if (fd_found_in_trace_context == export_buffer->fd)
430 		return;
431 
432 	/*
433 	 * If a buffer was previously added to the trace context using the video device
434 	 * file descriptor, replace the video fd with the more specific buffer fd from EXPBUF.
435 	 */
436 	if (fd_found_in_trace_context != 0)
437 		remove_buffer_trace(type, index);
438 
439 	add_buffer_trace(export_buffer->fd, type, index);
440 }
441 
querybuf_setup(int fd,struct v4l2_buffer * buf)442 void querybuf_setup(int fd, struct v4l2_buffer *buf)
443 {
444 	/* If the buffer was already added to the trace context don't add it again. */
445 	if (get_buffer_fd_trace(buf->type, buf->index) != 0)
446 		return;
447 
448 	if (buf->memory == V4L2_MEMORY_MMAP) {
449 		__u32 offset = 0;
450 		if ((buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) ||
451 		    (buf->type == V4L2_BUF_TYPE_VIDEO_OUTPUT))
452 			offset = buf->m.offset;
453 		if ((buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) ||
454 		    (buf->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE))
455 			offset = buf->m.planes->m.mem_offset;
456 		add_buffer_trace(fd, buf->type, buf->index, offset);
457 	}
458 }
459 
query_ext_ctrl_setup(int fd,struct v4l2_query_ext_ctrl * ptr)460 void query_ext_ctrl_setup(int fd, struct v4l2_query_ext_ctrl *ptr)
461 {
462 	if (ptr->flags & (V4L2_CTRL_FLAG_HAS_PAYLOAD|V4L2_CTRL_FLAG_DYNAMIC_ARRAY)) {
463 		if (ptr->id == V4L2_CID_STATELESS_HEVC_ENTRY_POINT_OFFSETS)
464 			ctx_trace.elems = ptr->elems;
465 	}
466 }
467 
write_json_object_to_json_file(json_object * jobj)468 void write_json_object_to_json_file(json_object *jobj)
469 {
470 	std::string json_str;
471 	if (getenv("V4L2_TRACER_OPTION_COMPACT_PRINT") != nullptr)
472 		json_str = json_object_to_json_string_ext(jobj, JSON_C_TO_STRING_PLAIN);
473 	else
474 		json_str = json_object_to_json_string_ext(jobj, JSON_C_TO_STRING_PRETTY);
475 
476 	if (ctx_trace.trace_file == nullptr) {
477 		std::string filename;
478 		if (getenv("TRACE_ID") != nullptr)
479 			filename = getenv("TRACE_ID");
480 		ctx_trace.trace_filename = filename;
481 		ctx_trace.trace_filename += ".json";
482 		ctx_trace.trace_file = fopen(ctx_trace.trace_filename.c_str(), "a");
483 	}
484 
485 	fwrite(json_str.c_str(), sizeof(char), json_str.length(), ctx_trace.trace_file);
486 	fputs(",\n", ctx_trace.trace_file);
487 	fflush(ctx_trace.trace_file);
488 }
489 
close_json_file(void)490 void close_json_file(void)
491 {
492 	if (ctx_trace.trace_file != nullptr) {
493 		fclose(ctx_trace.trace_file);
494 		ctx_trace.trace_file = 0;
495 	}
496 }
497