/*
 * Copyright (C) 2014-2017 SUMOMO Computer Association
 *     Authors Ayaka <ayaka@soulik.info>
 * Copyright (C) 2017 Collabora Ltd.
 *     Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 *
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>

#include "gstv4l2object.h"
#include "gstv4l2videoenc.h"

#include <gst/gst-i18n-plugin.h>

GST_DEBUG_CATEGORY_STATIC (gst_v4l2_video_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_video_enc_debug

typedef struct
{
  gchar *device;
  GstCaps *sink_caps;
  GstCaps *src_caps;
  const GstV4l2Codec *codec;
} GstV4l2VideoEncCData;

enum
{
  PROP_0,
  V4L2_STD_OBJECT_PROPS,
};

#define gst_v4l2_video_enc_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE (GstV4l2VideoEnc, gst_v4l2_video_enc,
    GST_TYPE_VIDEO_ENCODER);

static void
gst_v4l2_video_enc_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);

  switch (prop_id) {
    case PROP_CAPTURE_IO_MODE:
      if (!gst_v4l2_object_set_property_helper (self->v4l2capture,
              prop_id, value, pspec)) {
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      }
      break;

      /* By default, only set on output */
    default:
      if (!gst_v4l2_object_set_property_helper (self->v4l2output,
              prop_id, value, pspec)) {
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      }
      break;
  }
}

static void
gst_v4l2_video_enc_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);

  switch (prop_id) {
    case PROP_CAPTURE_IO_MODE:
      if (!gst_v4l2_object_get_property_helper (self->v4l2capture,
              prop_id, value, pspec)) {
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      }
      break;

      /* By default read from output */
    default:
      if (!gst_v4l2_object_get_property_helper (self->v4l2output,
              prop_id, value, pspec)) {
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      }
      break;
  }
}

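/* GstVideoEncoder::open() implementation.  Opens the V4L2 OUTPUT queue (the
 * raw video input side of the m2m device), shares the file descriptor with
 * the CAPTURE queue (the encoded output side), and probes the formats
 * supported on both ends.  Failing to find at least one raw input and one
 * coded output format is treated as a configuration error. */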
static gboolean
gst_v4l2_video_enc_open (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  GstCaps *codec_caps;

  GST_DEBUG_OBJECT (self, "Opening");

  if (!gst_v4l2_object_open (self->v4l2output, &error))
    goto failure;

  if (!gst_v4l2_object_open_shared (self->v4l2capture, self->v4l2output))
    goto failure;

  self->probed_sinkcaps = gst_v4l2_object_probe_caps (self->v4l2output,
      gst_v4l2_object_get_raw_caps ());

  if (gst_caps_is_empty (self->probed_sinkcaps))
    goto no_raw_format;

  codec_caps = gst_pad_get_pad_template_caps (encoder->srcpad);
  self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
      codec_caps);
  gst_caps_unref (codec_caps);

  if (gst_caps_is_empty (self->probed_srccaps))
    goto no_encoded_format;

  return TRUE;

no_encoded_format:
  GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
      (_("Encoder on device %s has no supported output format"),
          self->v4l2output->videodev), (NULL));
  goto failure;


no_raw_format:
  GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
      (_("Encoder on device %s has no supported input format"),
          self->v4l2output->videodev), (NULL));
  goto failure;

failure:
  if (GST_V4L2_IS_OPEN (self->v4l2output))
    gst_v4l2_object_close (self->v4l2output);

  if (GST_V4L2_IS_OPEN (self->v4l2capture))
    gst_v4l2_object_close (self->v4l2capture);

  gst_caps_replace (&self->probed_srccaps, NULL);
  gst_caps_replace (&self->probed_sinkcaps, NULL);

  gst_v4l2_error (self, &error);

  return FALSE;
}

static gboolean
gst_v4l2_video_enc_close (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Closing");

  gst_v4l2_object_close (self->v4l2output);
  gst_v4l2_object_close (self->v4l2capture);
  gst_caps_replace (&self->probed_srccaps, NULL);
  gst_caps_replace (&self->probed_sinkcaps, NULL);

  return TRUE;
}

static gboolean
gst_v4l2_video_enc_start (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Starting");

  gst_v4l2_object_unlock (self->v4l2output);
  g_atomic_int_set (&self->active, TRUE);
  self->output_flow = GST_FLOW_OK;

  return TRUE;
}

static gboolean
gst_v4l2_video_enc_stop (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Stopping");

  gst_v4l2_object_unlock (self->v4l2output);
  gst_v4l2_object_unlock (self->v4l2capture);

  /* Wait for capture thread to stop */
  gst_pad_stop_task (encoder->srcpad);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  self->output_flow = GST_FLOW_OK;
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  /* Should have been flushed already */
  g_assert (g_atomic_int_get (&self->active) == FALSE);
  g_assert (g_atomic_int_get (&self->processing) == FALSE);

  gst_v4l2_object_stop (self->v4l2output);
  gst_v4l2_object_stop (self->v4l2capture);

  if (self->input_state) {
    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
  }

  GST_DEBUG_OBJECT (self, "Stopped");

  return TRUE;
}

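/* Small wrapper around the VIDIOC_ENCODER_CMD ioctl.  V4L2_ENC_CMD_STOP is
 * used below to initiate a drain: the driver keeps producing buffers until
 * it marks the last one, at which point the capture task pauses itself.
 * ENOTTY simply means the driver does not implement the ioctl, which is why
 * that case is only logged at INFO level. */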
static gboolean
gst_v4l2_encoder_cmd (GstV4l2Object * v4l2object, guint cmd, guint flags)
{
  struct v4l2_encoder_cmd ecmd = { 0, };

  GST_DEBUG_OBJECT (v4l2object->element,
      "sending v4l2 encoder command %u with flags %u", cmd, flags);

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  ecmd.cmd = cmd;
  ecmd.flags = flags;
  if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_ENCODER_CMD, &ecmd) < 0)
    goto ecmd_failed;

  return TRUE;

ecmd_failed:
  if (errno == ENOTTY) {
    GST_INFO_OBJECT (v4l2object->element,
        "Failed to send encoder command %u with flags %u for '%s'. (%s)",
        cmd, flags, v4l2object->videodev, g_strerror (errno));
  } else {
    GST_ERROR_OBJECT (v4l2object->element,
        "Failed to send encoder command %u with flags %u for '%s'. (%s)",
        cmd, flags, v4l2object->videodev, g_strerror (errno));
  }
  return FALSE;
}

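/* GstVideoEncoder::finish() implementation, called at EOS.  The stream lock
 * is released so the capture task can keep pushing the remaining buffers,
 * V4L2_ENC_CMD_STOP asks the driver to drain, and we then wait on the task
 * lock until the capture task has paused on the last buffer. */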
static GstFlowReturn
gst_v4l2_video_enc_finish (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;

  if (gst_pad_get_task_state (encoder->srcpad) != GST_TASK_STARTED)
    goto done;

  GST_DEBUG_OBJECT (self, "Finishing encoding");

  /* drop the stream lock while draining, so remaining buffers can be
   * pushed from the src pad task thread */
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  if (gst_v4l2_encoder_cmd (self->v4l2capture, V4L2_ENC_CMD_STOP, 0)) {
    GstTask *task = encoder->srcpad->task;

    /* Wait for the task to be drained */
    GST_DEBUG_OBJECT (self, "Waiting for encoder stop");
    GST_OBJECT_LOCK (task);
    while (GST_TASK_STATE (task) == GST_TASK_STARTED)
      GST_TASK_WAIT (task);
    GST_OBJECT_UNLOCK (task);
    ret = GST_FLOW_FLUSHING;
  }

  /* and ensure the processing thread has stopped in case another error
   * occurred. */
  gst_v4l2_object_unlock (self->v4l2capture);
  gst_pad_stop_task (encoder->srcpad);
  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  if (ret == GST_FLOW_FLUSHING)
    ret = self->output_flow;

  GST_DEBUG_OBJECT (encoder, "Done draining buffers");

done:
  return ret;
}

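/* GstVideoEncoder::set_format() implementation.  If caps change while
 * streaming, the encoder is drained and both queues are stopped first.  The
 * output state is created from the src pad template caps and refined during
 * negotiate(), after which the raw format is applied to the V4L2 OUTPUT
 * queue. */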
static gboolean
gst_v4l2_video_enc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  gboolean ret = TRUE;
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  GstCaps *outcaps;
  GstVideoCodecState *output;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  if (self->input_state) {
    if (gst_v4l2_object_caps_equal (self->v4l2output, state->caps)) {
      GST_DEBUG_OBJECT (self, "Compatible caps");
      return TRUE;
    }

    if (gst_v4l2_video_enc_finish (encoder) != GST_FLOW_OK)
      return FALSE;

    gst_v4l2_object_stop (self->v4l2output);
    gst_v4l2_object_stop (self->v4l2capture);

    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
  }

  outcaps = gst_pad_get_pad_template_caps (encoder->srcpad);
  outcaps = gst_caps_make_writable (outcaps);
  output = gst_video_encoder_set_output_state (encoder, outcaps, state);
  gst_video_codec_state_unref (output);

  if (!gst_video_encoder_negotiate (encoder))
    return FALSE;

  if (!gst_v4l2_object_set_format (self->v4l2output, state->caps, &error)) {
    gst_v4l2_error (self, &error);
    return FALSE;
  }

  /* best effort */
  gst_v4l2_object_setup_padding (self->v4l2output);

  self->input_state = gst_video_codec_state_ref (state);

  GST_DEBUG_OBJECT (self, "output caps: %" GST_PTR_FORMAT, state->caps);

  return ret;
}

static gboolean
gst_v4l2_video_enc_flush (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Flushing");

  /* Ensure the processing thread has stopped for the reverse playback
   * discont case */
  if (g_atomic_int_get (&self->processing)) {
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

    gst_v4l2_object_unlock_stop (self->v4l2output);
    gst_v4l2_object_unlock_stop (self->v4l2capture);
    gst_pad_stop_task (encoder->srcpad);

    GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  }

  self->output_flow = GST_FLOW_OK;

  gst_v4l2_object_unlock_stop (self->v4l2output);
  gst_v4l2_object_unlock_stop (self->v4l2capture);

  return TRUE;
}

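/* Context passed to negotiate_profile_and_level() through gst_caps_foreach().
 * It carries the encoder instance and receives the profile and level strings
 * that were accepted by the driver, if any. */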
struct ProfileLevelCtx
{
  GstV4l2VideoEnc *self;
  const gchar *profile;
  const gchar *level;
};

static gboolean
get_string_list (GstStructure * s, const gchar * field, GQueue * queue)
{
  const GValue *value;

  value = gst_structure_get_value (s, field);

  if (!value)
    return FALSE;

  if (GST_VALUE_HOLDS_LIST (value)) {
    guint i;

    if (gst_value_list_get_size (value) == 0)
      return FALSE;

    for (i = 0; i < gst_value_list_get_size (value); i++) {
      const GValue *item = gst_value_list_get_value (value, i);

      if (G_VALUE_HOLDS_STRING (item))
        g_queue_push_tail (queue, g_value_dup_string (item));
    }
  } else if (G_VALUE_HOLDS_STRING (value)) {
    g_queue_push_tail (queue, g_value_dup_string (value));
  }

  return TRUE;
}

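/* Called for each structure of the downstream caps.  For every profile (and
 * then level) string listed in the structure, the matching V4L2 control is
 * set on the driver; the driver may adjust the value, in which case the
 * adjusted profile/level is accepted if it is also part of the caps.  The
 * return value feeds gst_caps_foreach(): TRUE (failed) keeps iterating over
 * the remaining structures, FALSE stops on the first success. */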
static gboolean
negotiate_profile_and_level (GstCapsFeatures * features, GstStructure * s,
    gpointer user_data)
{
  struct ProfileLevelCtx *ctx = user_data;
  GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_GET_CLASS (ctx->self);
  GstV4l2Object *v4l2object = GST_V4L2_VIDEO_ENC (ctx->self)->v4l2output;
  GQueue profiles = G_QUEUE_INIT;
  GQueue levels = G_QUEUE_INIT;
  gboolean failed = FALSE;
  const GstV4l2Codec *codec = klass->codec;

  if (codec->profile_cid && get_string_list (s, "profile", &profiles)) {
    GList *l;

    for (l = profiles.head; l; l = l->next) {
      struct v4l2_control control = { 0, };
      gint v4l2_profile;
      const gchar *profile = l->data;

      GST_TRACE_OBJECT (ctx->self, "Trying profile %s", profile);

      control.id = codec->profile_cid;

      control.value = v4l2_profile = codec->profile_from_string (profile);

      if (control.value < 0)
        continue;

      if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0) {
        GST_WARNING_OBJECT (ctx->self, "Failed to set %s profile: '%s'",
            klass->codec_name, g_strerror (errno));
        break;
      }

      profile = codec->profile_to_string (control.value);

      if (control.value == v4l2_profile) {
        ctx->profile = profile;
        break;
      }

      if (g_list_find_custom (l, profile, (GCompareFunc) g_strcmp0)) {
        ctx->profile = profile;
        break;
      }
    }

    if (profiles.length && !ctx->profile)
      failed = TRUE;

    g_queue_foreach (&profiles, (GFunc) g_free, NULL);
    g_queue_clear (&profiles);
  }

  if (!failed && codec->level_cid && get_string_list (s, "level", &levels)) {
    GList *l;

    for (l = levels.head; l; l = l->next) {
      struct v4l2_control control = { 0, };
      gint v4l2_level;
      const gchar *level = l->data;

      GST_TRACE_OBJECT (ctx->self, "Trying level %s", level);

      control.id = codec->level_cid;
      control.value = v4l2_level = codec->level_from_string (level);

      if (control.value < 0)
        continue;

      if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0) {
        GST_WARNING_OBJECT (ctx->self, "Failed to set %s level: '%s'",
            klass->codec_name, g_strerror (errno));
        break;
      }

      level = codec->level_to_string (control.value);

      if (control.value == v4l2_level) {
        ctx->level = level;
        break;
      }

      if (g_list_find_custom (l, level, (GCompareFunc) g_strcmp0)) {
        ctx->level = level;
        break;
      }
    }

    if (levels.length && !ctx->level)
      failed = TRUE;

    g_queue_foreach (&levels, (GFunc) g_free, NULL);
    g_queue_clear (&levels);
  }

  /* If it failed, we continue */
  return failed;
}

static gboolean
gst_v4l2_video_enc_negotiate (GstVideoEncoder * encoder)
{
  GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_GET_CLASS (encoder);
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstV4l2Object *v4l2object = self->v4l2output;
  GstCaps *allowed_caps;
  struct ProfileLevelCtx ctx = { self, NULL, NULL };
  GstVideoCodecState *state;
  GstStructure *s;
  const GstV4l2Codec *codec = klass->codec;

  GST_DEBUG_OBJECT (self, "Negotiating %s profile and level.",
      klass->codec_name);

  /* Only renegotiate on upstream changes */
  if (self->input_state)
    return TRUE;

  if (!codec)
    goto done;

  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));

  if (allowed_caps) {

    if (gst_caps_is_empty (allowed_caps))
      goto not_negotiated;

    allowed_caps = gst_caps_make_writable (allowed_caps);

    /* negotiate_profile_and_level() will return TRUE on failure to keep
     * iterating; if gst_caps_foreach() returns TRUE it means there was no
     * compatible profile and level in any of the structures */
    if (gst_caps_foreach (allowed_caps, negotiate_profile_and_level, &ctx)) {
      goto no_profile_level;
    }

    gst_caps_unref (allowed_caps);
    allowed_caps = NULL;
  }

  if (codec->profile_cid && !ctx.profile) {
    struct v4l2_control control = { 0, };

    control.id = codec->profile_cid;

    if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
      goto g_ctrl_failed;

    ctx.profile = codec->profile_to_string (control.value);
  }

  if (codec->level_cid && !ctx.level) {
    struct v4l2_control control = { 0, };

    control.id = codec->level_cid;

    if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
      goto g_ctrl_failed;

    ctx.level = codec->level_to_string (control.value);
  }

  GST_DEBUG_OBJECT (self, "Selected %s profile %s at level %s",
      klass->codec_name, ctx.profile, ctx.level);

  state = gst_video_encoder_get_output_state (encoder);
  s = gst_caps_get_structure (state->caps, 0);

  if (codec->profile_cid)
    gst_structure_set (s, "profile", G_TYPE_STRING, ctx.profile, NULL);

  if (codec->level_cid)
    gst_structure_set (s, "level", G_TYPE_STRING, ctx.level, NULL);

done:
  if (!GST_VIDEO_ENCODER_CLASS (parent_class)->negotiate (encoder))
    return FALSE;

  return TRUE;

g_ctrl_failed:
  GST_WARNING_OBJECT (self, "Failed to get %s profile and level: '%s'",
      klass->codec_name, g_strerror (errno));
  goto not_negotiated;

no_profile_level:
  GST_WARNING_OBJECT (self, "No compatible level and profile in caps: %"
      GST_PTR_FORMAT, allowed_caps);
  goto not_negotiated;

not_negotiated:
  if (allowed_caps)
    gst_caps_unref (allowed_caps);
  return FALSE;
}

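/* Returns TRUE when 'old' lies more than 100 frames behind 'current',
 * taking 32-bit wraparound into account.  For example, current = 5 and
 * old = G_MAXUINT32 - 3 gives a raw difference close to G_MAXUINT32, but
 * the wrapped distance is only 9 frames, so it is not considered old. */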
static gboolean
check_system_frame_number_too_old (guint32 current, guint32 old)
{
  guint32 absdiff = current > old ? current - old : old - current;

  /* More than 100 frames in the past, or current wrapped around */
  if (absdiff > 100) {
    /* Wraparound and difference is actually smaller than 100 */
    if (absdiff > G_MAXUINT32 - 100)
      return FALSE;
    return TRUE;
  }

  return FALSE;
}

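/* Body of the src pad task.  Each iteration allocates an output buffer, lets
 * the V4L2 capture pool fill it with encoded data, matches it back to the
 * pending GstVideoCodecFrame (the driver is expected to copy the input
 * timestamp, which encodes the frame number), propagates the keyframe flag
 * and pushes the frame downstream.  On any error the task pauses itself and
 * records the flow return in self->output_flow. */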
static void
gst_v4l2_video_enc_loop (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstVideoCodecFrame *frame;
  GstBuffer *buffer = NULL;
  GstFlowReturn ret;

  GST_LOG_OBJECT (encoder, "Allocate output buffer");

  buffer = gst_video_encoder_allocate_output_buffer (encoder,
      self->v4l2capture->info.size);

  if (NULL == buffer) {
    ret = GST_FLOW_FLUSHING;
    goto beach;
  }

  /* FIXME Check if buffer isn't the last one here */

  GST_LOG_OBJECT (encoder, "Process output buffer");
  ret =
      gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL
      (self->v4l2capture->pool), &buffer, NULL);

  if (ret != GST_FLOW_OK)
    goto beach;

  if (GST_BUFFER_TIMESTAMP (buffer) % GST_SECOND != 0)
    GST_ERROR_OBJECT (encoder,
        "Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git");
  GST_LOG_OBJECT (encoder, "Got buffer for frame number %u",
      (guint32) (GST_BUFFER_PTS (buffer) / GST_SECOND));
  frame =
      gst_video_encoder_get_frame (encoder,
      GST_BUFFER_TIMESTAMP (buffer) / GST_SECOND);

  if (frame) {
    GstVideoCodecFrame *oldest_frame;
    gboolean warned = FALSE;

    /* Garbage collect old frames in case of codec bugs */
    while ((oldest_frame = gst_video_encoder_get_oldest_frame (encoder)) &&
        check_system_frame_number_too_old (frame->system_frame_number,
            oldest_frame->system_frame_number)) {
      gst_video_encoder_finish_frame (encoder, oldest_frame);
      oldest_frame = NULL;

      if (!warned) {
        g_warning ("%s: Too old frames, bug in encoder -- please file a bug",
            GST_ELEMENT_NAME (encoder));
        warned = TRUE;
      }
    }
    if (oldest_frame)
      gst_video_codec_frame_unref (oldest_frame);

    /* At this point, the delta unit buffer flag is already correctly set by
     * gst_v4l2_buffer_pool_process.  However, gst_video_encoder_finish_frame
     * will overwrite it from GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame), so
     * set the frame's sync point flag accordingly here.
     */
    if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT))
      GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
    else
      GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
    frame->output_buffer = buffer;
    buffer = NULL;
    ret = gst_video_encoder_finish_frame (encoder, frame);

    if (ret != GST_FLOW_OK)
      goto beach;
  } else {
    GST_WARNING_OBJECT (encoder, "Encoder is producing too many buffers");
    gst_buffer_unref (buffer);
  }

  return;

beach:
  GST_DEBUG_OBJECT (encoder, "Leaving output thread");

  gst_buffer_replace (&buffer, NULL);
  self->output_flow = ret;
  g_atomic_int_set (&self->processing, FALSE);
  gst_v4l2_object_unlock (self->v4l2output);
  gst_pad_pause_task (encoder->srcpad);
}

static void
gst_v4l2_video_enc_loop_stopped (GstV4l2VideoEnc * self)
{
  if (g_atomic_int_get (&self->processing)) {
    GST_DEBUG_OBJECT (self, "Early stop of encoding thread");
    self->output_flow = GST_FLOW_FLUSHING;
    g_atomic_int_set (&self->processing, FALSE);
  }

  GST_DEBUG_OBJECT (self, "Encoding task destroyed: %s",
      gst_flow_get_name (self->output_flow));

}

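/* GstVideoEncoder::handle_frame() implementation.  On the first frame (or
 * after an error) it configures and activates the output (raw) and capture
 * (encoded) buffer pools and starts the src pad task running
 * gst_v4l2_video_enc_loop().  The raw input buffer is then queued to the
 * driver through the output pool, with the frame number passed along so the
 * capture loop can match encoded buffers back to their frames. */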
static GstFlowReturn
gst_v4l2_video_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;
  GstTaskState task_state;

  GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);

  if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
    goto flushing;

  task_state = gst_pad_get_task_state (GST_VIDEO_ENCODER_SRC_PAD (self));
  if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED) {
    GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);

    /* It is possible that the processing thread stopped due to an error or
     * because the last buffer was reached during the draining process. */
    if (self->output_flow != GST_FLOW_OK &&
        self->output_flow != GST_FLOW_FLUSHING &&
        self->output_flow != GST_V4L2_FLOW_LAST_BUFFER) {
      GST_DEBUG_OBJECT (self, "Processing loop stopped with error: %s, leaving",
          gst_flow_get_name (self->output_flow));
      ret = self->output_flow;
      goto drop;
    }

    /* Ensure input internal pool is active */
    if (!gst_buffer_pool_is_active (pool)) {
      GstStructure *config = gst_buffer_pool_get_config (pool);
      guint min = MAX (self->v4l2output->min_buffers,
          GST_V4L2_MIN_BUFFERS (self->v4l2output));

      gst_buffer_pool_config_set_params (config, self->input_state->caps,
          self->v4l2output->info.size, min, min);

      /* There is no reason to refuse this config */
      if (!gst_buffer_pool_set_config (pool, config))
        goto activate_failed;

      if (!gst_buffer_pool_set_active (pool, TRUE))
        goto activate_failed;
    }

    if (!gst_buffer_pool_set_active
        (GST_BUFFER_POOL (self->v4l2capture->pool), TRUE)) {
      GST_WARNING_OBJECT (self, "Could not activate capture buffer pool.");
      goto activate_failed;
    }

    GST_DEBUG_OBJECT (self, "Starting encoding thread");

    /* Start the processing task; when it quits, the task will disable input
     * processing to unlock the input if draining, or to prevent a potential
     * block. */
    if (!gst_pad_start_task (encoder->srcpad,
            (GstTaskFunction) gst_v4l2_video_enc_loop, self,
            (GDestroyNotify) gst_v4l2_video_enc_loop_stopped))
      goto start_task_failed;
  }

  if (frame->input_buffer) {
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    GST_LOG_OBJECT (encoder, "Passing buffer with frame number %u",
        frame->system_frame_number);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
            v4l2output->pool), &frame->input_buffer,
        &frame->system_frame_number);
    GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

    if (ret == GST_FLOW_FLUSHING) {
      if (gst_pad_get_task_state (encoder->srcpad) != GST_TASK_STARTED)
        ret = self->output_flow;
      goto drop;
    } else if (ret != GST_FLOW_OK) {
      goto process_failed;
    }
  }

  gst_video_codec_frame_unref (frame);
  return ret;

  /* ERRORS */
activate_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
        (_("Failed to allocate required memory.")),
        ("Buffer pool activation failed"));
    return GST_FLOW_ERROR;

  }
flushing:
  {
    ret = GST_FLOW_FLUSHING;
    goto drop;
  }
start_task_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to start encoding thread.")), (NULL));
    g_atomic_int_set (&self->processing, FALSE);
    ret = GST_FLOW_ERROR;
    goto drop;
  }
process_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to process frame.")),
846 ("Maybe be due to not enough memory or failing driver"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
drop:
  {
    gst_video_encoder_finish_frame (encoder, frame);
    return ret;
  }
}

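/* GstVideoEncoder::decide_allocation() implementation.  The negotiated
 * output caps, with the colorimetry field stripped, are applied to the V4L2
 * CAPTURE queue so the driver knows the resolution and framerate, then the
 * capture pool allocation is decided and a rough latency based on the number
 * of capture buffers is reported. */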
static gboolean
gst_v4l2_video_enc_decide_allocation (GstVideoEncoder *
    encoder, GstQuery * query)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstVideoCodecState *state = gst_video_encoder_get_output_state (encoder);
  GstCaps *caps;
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  GstClockTime latency;
  gboolean ret = FALSE;

  /* We need to set the format here, since this is called right after
   * GstVideoEncoder has set the width, height and framerate into the state
   * caps. These are needed by the driver to calculate the buffer size and to
   * implement bitrate adaptation. */
  caps = gst_caps_copy (state->caps);
  gst_structure_remove_field (gst_caps_get_structure (caps, 0), "colorimetry");
  if (!gst_v4l2_object_set_format (self->v4l2capture, caps, &error)) {
    gst_v4l2_error (self, &error);
    gst_caps_unref (caps);
    ret = FALSE;
    goto done;
  }
  gst_caps_unref (caps);

  /* best effort */
  gst_v4l2_object_setup_padding (self->v4l2capture);

  if (gst_v4l2_object_decide_allocation (self->v4l2capture, query)) {
    GstVideoEncoderClass *enc_class = GST_VIDEO_ENCODER_CLASS (parent_class);
    ret = enc_class->decide_allocation (encoder, query);
  }

  /* FIXME This may not be entirely correct, as the encoder may keep some
   * observations without delaying the encoding. The Linux Media API needs
   * some more work to explicitly express the decoder / encoder latency. This
   * value will then become the max latency, and the reported driver latency
   * would become the min latency. */
  if (!GST_CLOCK_TIME_IS_VALID (self->v4l2capture->duration))
    self->v4l2capture->duration = gst_util_uint64_scale_int (GST_SECOND, 1, 25);
  latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
  gst_video_encoder_set_latency (encoder, latency, latency);
  GST_DEBUG_OBJECT (self, "Setting latency: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (latency));

done:
  gst_video_codec_state_unref (state);
  return ret;
}

static gboolean
gst_v4l2_video_enc_propose_allocation (GstVideoEncoder *
    encoder, GstQuery * query)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  gboolean ret = FALSE;

  GST_DEBUG_OBJECT (self, "called");

  if (query == NULL)
    ret = TRUE;
  else
    ret = gst_v4l2_object_propose_allocation (self->v4l2output, query);

  if (ret)
    ret = GST_VIDEO_ENCODER_CLASS (parent_class)->propose_allocation (encoder,
        query);

  return ret;
}

static gboolean
gst_v4l2_video_enc_src_query (GstVideoEncoder * encoder, GstQuery * query)
{
  gboolean ret = TRUE;
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:{
      GstCaps *filter, *result = NULL;
      GstPad *pad = GST_VIDEO_ENCODER_SRC_PAD (encoder);

      gst_query_parse_caps (query, &filter);

      /* FIXME Try not to probe the entire encoder, but only the format
       * implemented by the subclass */
      if (self->probed_srccaps) {
        GstCaps *tmpl = gst_pad_get_pad_template_caps (pad);
        result = gst_caps_intersect (tmpl, self->probed_srccaps);
        gst_caps_unref (tmpl);
      } else
        result = gst_pad_get_pad_template_caps (pad);

      if (filter) {
        GstCaps *tmp = result;
        result =
            gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (tmp);
      }

      GST_DEBUG_OBJECT (self, "Returning src caps %" GST_PTR_FORMAT, result);

      gst_query_set_caps_result (query, result);
      gst_caps_unref (result);
      break;
    }

    default:
      ret = GST_VIDEO_ENCODER_CLASS (parent_class)->src_query (encoder, query);
      break;
  }

  return ret;
}

static gboolean
gst_v4l2_video_enc_sink_query (GstVideoEncoder * encoder, GstQuery * query)
{
  gboolean ret = TRUE;
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:{
      GstCaps *filter, *result = NULL;
      GstPad *pad = GST_VIDEO_ENCODER_SINK_PAD (encoder);

      gst_query_parse_caps (query, &filter);

      if (self->probed_sinkcaps)
        result = gst_caps_ref (self->probed_sinkcaps);
      else
        result = gst_pad_get_pad_template_caps (pad);

      if (filter) {
        GstCaps *tmp = result;
        result =
            gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (tmp);
      }

      GST_DEBUG_OBJECT (self, "Returning sink caps %" GST_PTR_FORMAT, result);

      gst_query_set_caps_result (query, result);
      gst_caps_unref (result);
      break;
    }

    default:
      ret = GST_VIDEO_ENCODER_CLASS (parent_class)->sink_query (encoder, query);
      break;
  }

  return ret;
}

static gboolean
gst_v4l2_video_enc_sink_event (GstVideoEncoder * encoder, GstEvent * event)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  gboolean ret;
  GstEventType type = GST_EVENT_TYPE (event);

  switch (type) {
    case GST_EVENT_FLUSH_START:
      GST_DEBUG_OBJECT (self, "flush start");
      gst_v4l2_object_unlock (self->v4l2output);
      gst_v4l2_object_unlock (self->v4l2capture);
      break;
    default:
      break;
  }

  ret = GST_VIDEO_ENCODER_CLASS (parent_class)->sink_event (encoder, event);

  switch (type) {
    case GST_EVENT_FLUSH_START:
      gst_pad_stop_task (encoder->srcpad);
1033 GST_DEBUG_OBJECT (self, "flush start done");
1034 default:
      break;
  }

  return ret;
}

static GstStateChangeReturn
gst_v4l2_video_enc_change_state (GstElement * element,
    GstStateChange transition)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (element);

  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
    g_atomic_int_set (&self->active, FALSE);
    gst_v4l2_object_unlock (self->v4l2output);
    gst_v4l2_object_unlock (self->v4l2capture);
  }

  return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
}


static void
gst_v4l2_video_enc_dispose (GObject * object)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);

  gst_caps_replace (&self->probed_sinkcaps, NULL);
  gst_caps_replace (&self->probed_srccaps, NULL);

  G_OBJECT_CLASS (parent_class)->dispose (object);
}

static void
gst_v4l2_video_enc_finalize (GObject * object)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);

  gst_v4l2_object_destroy (self->v4l2capture);
  gst_v4l2_object_destroy (self->v4l2output);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}


static void
gst_v4l2_video_enc_init (GstV4l2VideoEnc * self)
{
  /* V4L2 objects are created in subinstance_init */
}

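/* Instance init for the per-device subclasses registered below.  The sink
 * pad is backed by a V4L2 OUTPUT queue object (raw frames going into the
 * driver) and the src pad by a V4L2 CAPTURE queue object (encoded frames
 * coming out), both pointing at the subclass' default device. */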
static void
gst_v4l2_video_enc_subinstance_init (GTypeInstance * instance, gpointer g_class)
{
  GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_CLASS (g_class);
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (instance);

  self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
      GST_OBJECT (GST_VIDEO_ENCODER_SINK_PAD (self)),
      V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
      gst_v4l2_get_output, gst_v4l2_set_output, NULL);
  self->v4l2output->no_initial_format = TRUE;
  self->v4l2output->keep_aspect = FALSE;

  self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
      GST_OBJECT (GST_VIDEO_ENCODER_SRC_PAD (self)),
      V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);
}

static void
gst_v4l2_video_enc_class_init (GstV4l2VideoEncClass * klass)
{
  GstElementClass *element_class;
  GObjectClass *gobject_class;
  GstVideoEncoderClass *video_encoder_class;

  parent_class = g_type_class_peek_parent (klass);

  element_class = (GstElementClass *) klass;
  gobject_class = (GObjectClass *) klass;
  video_encoder_class = (GstVideoEncoderClass *) klass;

  GST_DEBUG_CATEGORY_INIT (gst_v4l2_video_enc_debug, "v4l2videoenc", 0,
      "V4L2 Video Encoder");

  gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_dispose);
  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_finalize);
  gobject_class->set_property =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_set_property);
  gobject_class->get_property =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_get_property);

  video_encoder_class->open = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_open);
  video_encoder_class->close = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_close);
  video_encoder_class->start = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_start);
  video_encoder_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_stop);
  video_encoder_class->finish = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_finish);
  video_encoder_class->flush = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_flush);
  video_encoder_class->set_format =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_set_format);
  video_encoder_class->negotiate =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_negotiate);
  video_encoder_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_decide_allocation);
  video_encoder_class->propose_allocation =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_propose_allocation);
  video_encoder_class->sink_query =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_sink_query);
  video_encoder_class->src_query =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_src_query);
  video_encoder_class->sink_event =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_sink_event);
  video_encoder_class->handle_frame =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_handle_frame);

  element_class->change_state =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_change_state);

  gst_v4l2_object_install_m2m_properties_helper (gobject_class);
}

static void
gst_v4l2_video_enc_subclass_init (gpointer g_class, gpointer data)
{
  GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_CLASS (g_class);
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
  GstV4l2VideoEncCData *cdata = data;

  klass->default_device = cdata->device;
  klass->codec = cdata->codec;

  gst_element_class_add_pad_template (element_class,
      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
          cdata->sink_caps));
  gst_element_class_add_pad_template (element_class,
      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
          cdata->src_caps));

  gst_caps_unref (cdata->sink_caps);
  gst_caps_unref (cdata->src_caps);
  g_free (cdata);
}

/* Probing functions */
gboolean
gst_v4l2_is_video_enc (GstCaps * sink_caps, GstCaps * src_caps,
    GstCaps * codec_caps)
{
  gboolean ret = FALSE;
  gboolean (*check_caps) (const GstCaps *, const GstCaps *);

  if (codec_caps) {
    check_caps = gst_caps_can_intersect;
  } else {
    codec_caps = gst_v4l2_object_get_codec_caps ();
    check_caps = gst_caps_is_subset;
  }

  if (gst_caps_is_subset (sink_caps, gst_v4l2_object_get_raw_caps ())
      && check_caps (src_caps, codec_caps))
    ret = TRUE;

  return ret;
}

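/* Registers a GstV4l2VideoEnc subclass for one codec on one m2m device.  The
 * source caps are first filtered against the codec caps and, when a device
 * fd is available, restricted to the profiles and levels the driver actually
 * reports.  The per-device data (device path, caps, codec description) is
 * handed to the subclass through the GTypeInfo class_data. */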
void
gst_v4l2_video_enc_register (GstPlugin * plugin, GType type,
    const char *codec_name, const gchar * basename, const gchar * device_path,
    const GstV4l2Codec * codec, gint video_fd, GstCaps * sink_caps,
    GstCaps * codec_caps, GstCaps * src_caps)
{
  GstCaps *filtered_caps;
  GTypeQuery type_query;
  GTypeInfo type_info = { 0, };
  GType subtype;
  gchar *type_name;
  GstV4l2VideoEncCData *cdata;
  GValue value = G_VALUE_INIT;

  filtered_caps = gst_caps_intersect (src_caps, codec_caps);

  if (codec != NULL && video_fd != -1) {
    if (gst_v4l2_codec_probe_levels (codec, video_fd, &value)) {
      gst_caps_set_value (filtered_caps, "level", &value);
      g_value_unset (&value);
    }

    if (gst_v4l2_codec_probe_profiles (codec, video_fd, &value)) {
      gst_caps_set_value (filtered_caps, "profile", &value);
      g_value_unset (&value);
    }
  }

  cdata = g_new0 (GstV4l2VideoEncCData, 1);
  cdata->device = g_strdup (device_path);
  cdata->sink_caps = gst_caps_ref (sink_caps);
  cdata->src_caps = gst_caps_ref (filtered_caps);
  cdata->codec = codec;

  g_type_query (type, &type_query);
  memset (&type_info, 0, sizeof (type_info));
  type_info.class_size = type_query.class_size;
  type_info.instance_size = type_query.instance_size;
  type_info.class_init = gst_v4l2_video_enc_subclass_init;
  type_info.class_data = cdata;
  type_info.instance_init = gst_v4l2_video_enc_subinstance_init;

  /* The first encoder to be registered should use a constant name, like
   * v4l2h264enc; for any additional encoders we create unique names. Encoder
   * names may change between boots, so this should help gain stable names for
   * the most common use cases. */
  type_name = g_strdup_printf ("v4l2%senc", codec_name);

  if (g_type_from_name (type_name) != 0) {
    g_free (type_name);
    type_name = g_strdup_printf ("v4l2%s%senc", basename, codec_name);
  }

  subtype = g_type_register_static (type, type_name, &type_info, 0);

  if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1, subtype))
    GST_WARNING ("Failed to register plugin '%s'", type_name);

  g_free (type_name);
}
