/* GStreamer
 * Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
 * Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-mfvideosrc
 * @title: mfvideosrc
 *
 * Provides video capture from the Microsoft Media Foundation API.
 *
 * ## Example pipelines
 * |[
 * gst-launch-1.0 -v mfvideosrc ! fakesink
 * ]| Capture from the default video capture device and render to fakesink.
 *
 * |[
 * gst-launch-1.0 -v mfvideosrc device-index=1 ! fakesink
 * ]| Capture from the second video device (if available) and render to fakesink.
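 *
 * A device can also be selected by its human-readable name via the
 * #GstMFVideoSrc:device-name property. Illustrative sketch only; replace
 * the placeholder with a name reported by an actual capture device:
 * |[
 * gst-launch-1.0 -v mfvideosrc device-name="<camera-name>" ! fakesink
 * ]| Capture from the device whose name matches and render to fakesink.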
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstmfconfig.h"

#include "gstmfvideosrc.h"
#include "gstmfutils.h"
#include "gstmfsourceobject.h"
#include <string.h>

GST_DEBUG_CATEGORY (gst_mf_video_src_debug);
#define GST_CAT_DEFAULT gst_mf_video_src_debug

#if (GST_MF_WINAPI_APP && !GST_MF_WINAPI_DESKTOP)
/* FIXME: need to support JPEG for UWP */
#define SRC_TEMPLATE_CAPS \
    GST_VIDEO_CAPS_MAKE (GST_MF_VIDEO_FORMATS)
#else
#define SRC_TEMPLATE_CAPS \
    GST_VIDEO_CAPS_MAKE (GST_MF_VIDEO_FORMATS) "; " \
        "image/jpeg, width = " GST_VIDEO_SIZE_RANGE ", " \
        "height = " GST_VIDEO_SIZE_RANGE ", " \
        "framerate = " GST_VIDEO_FPS_RANGE
#endif

static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (SRC_TEMPLATE_CAPS));

struct _GstMFVideoSrc
{
  GstPushSrc parent;

  GstMFSourceObject *source;
  gboolean started;
  GstVideoInfo info;

  guint64 n_frames;
  GstClockTime latency;

  /* properties */
  gchar *device_path;
  gchar *device_name;
  gint device_index;
  gpointer dispatcher;
};

enum
{
  PROP_0,
  PROP_DEVICE_PATH,
  PROP_DEVICE_NAME,
  PROP_DEVICE_INDEX,
  PROP_DISPATCHER,
};

#define DEFAULT_DEVICE_PATH     NULL
#define DEFAULT_DEVICE_NAME     NULL
#define DEFAULT_DEVICE_INDEX    -1

static void gst_mf_video_src_finalize (GObject * object);
static void gst_mf_video_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
static void gst_mf_video_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);

static gboolean gst_mf_video_src_start (GstBaseSrc * src);
static gboolean gst_mf_video_src_stop (GstBaseSrc * src);
static gboolean gst_mf_video_src_set_caps (GstBaseSrc * src, GstCaps * caps);
static GstCaps *gst_mf_video_src_get_caps (GstBaseSrc * src, GstCaps * filter);
static GstCaps *gst_mf_video_src_fixate (GstBaseSrc * src, GstCaps * caps);
static gboolean gst_mf_video_src_unlock (GstBaseSrc * src);
static gboolean gst_mf_video_src_unlock_stop (GstBaseSrc * src);
static gboolean gst_mf_video_src_query (GstBaseSrc * src, GstQuery * query);

static GstFlowReturn gst_mf_video_src_create (GstPushSrc * pushsrc,
    GstBuffer ** buffer);

#define gst_mf_video_src_parent_class parent_class
G_DEFINE_TYPE (GstMFVideoSrc, gst_mf_video_src, GST_TYPE_PUSH_SRC);

static void
gst_mf_video_src_class_init (GstMFVideoSrcClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS (klass);
  GstPushSrcClass *pushsrc_class = GST_PUSH_SRC_CLASS (klass);

  gobject_class->finalize = gst_mf_video_src_finalize;
  gobject_class->get_property = gst_mf_video_src_get_property;
  gobject_class->set_property = gst_mf_video_src_set_property;

  g_object_class_install_property (gobject_class, PROP_DEVICE_PATH,
      g_param_spec_string ("device-path", "Device Path",
          "The device path", DEFAULT_DEVICE_PATH,
          G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
          G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
      g_param_spec_string ("device-name", "Device Name",
          "The human-readable device name", DEFAULT_DEVICE_NAME,
          G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
          G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_DEVICE_INDEX,
      g_param_spec_int ("device-index", "Device Index",
          "The zero-based device index", -1, G_MAXINT, DEFAULT_DEVICE_INDEX,
          G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
          G_PARAM_STATIC_STRINGS));
#if GST_MF_WINAPI_APP
  /**
   * GstMFVideoSrc:dispatcher:
   *
   * ICoreDispatcher COM object used for activating device from UI thread.
   *
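   * Illustrative usage sketch (hypothetical variable names; `dispatcher` is
   * assumed to be an ICoreDispatcher pointer the application already holds):
   * |[<!-- language="C" -->
   * GstElement *src = gst_element_factory_make ("mfvideosrc", NULL);
   * g_object_set (src, "dispatcher", (gpointer) dispatcher, NULL);
   * ]|
   *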
   * Since: 1.18
   */
  g_object_class_install_property (gobject_class, PROP_DISPATCHER,
      g_param_spec_pointer ("dispatcher", "Dispatcher",
          "ICoreDispatcher COM object to use. In order for the application "
          "to ask for capture device permission, device activation must run "
          "on the UI thread via ICoreDispatcher. This element will increase "
          "the reference count of the given ICoreDispatcher and release it "
          "after use, so the caller does not need to perform any additional "
          "reference count management",
          GST_PARAM_CONDITIONALLY_AVAILABLE | GST_PARAM_MUTABLE_READY |
          G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
#endif

  gst_element_class_set_static_metadata (element_class,
      "Media Foundation Video Source",
      "Source/Video/Hardware",
      "Capture video stream through Windows Media Foundation",
      "Seungha Yang <seungha.yang@navercorp.com>");

  gst_element_class_add_static_pad_template (element_class, &src_template);

  basesrc_class->start = GST_DEBUG_FUNCPTR (gst_mf_video_src_start);
  basesrc_class->stop = GST_DEBUG_FUNCPTR (gst_mf_video_src_stop);
  basesrc_class->set_caps = GST_DEBUG_FUNCPTR (gst_mf_video_src_set_caps);
  basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_mf_video_src_get_caps);
  basesrc_class->fixate = GST_DEBUG_FUNCPTR (gst_mf_video_src_fixate);
  basesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_mf_video_src_unlock);
  basesrc_class->unlock_stop = GST_DEBUG_FUNCPTR (gst_mf_video_src_unlock_stop);
  basesrc_class->query = GST_DEBUG_FUNCPTR (gst_mf_video_src_query);

  pushsrc_class->create = GST_DEBUG_FUNCPTR (gst_mf_video_src_create);

  GST_DEBUG_CATEGORY_INIT (gst_mf_video_src_debug, "mfvideosrc", 0,
      "mfvideosrc");
}

static void
gst_mf_video_src_init (GstMFVideoSrc * self)
{
  gst_base_src_set_format (GST_BASE_SRC (self), GST_FORMAT_TIME);
  gst_base_src_set_live (GST_BASE_SRC (self), TRUE);

  self->device_index = DEFAULT_DEVICE_INDEX;
}

static void
gst_mf_video_src_finalize (GObject * object)
{
  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (object);

  g_free (self->device_name);
  g_free (self->device_path);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_mf_video_src_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (object);

  switch (prop_id) {
    case PROP_DEVICE_PATH:
      g_value_set_string (value, self->device_path);
      break;
    case PROP_DEVICE_NAME:
      g_value_set_string (value, self->device_name);
      break;
    case PROP_DEVICE_INDEX:
      g_value_set_int (value, self->device_index);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_mf_video_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (object);

  switch (prop_id) {
    case PROP_DEVICE_PATH:
      g_free (self->device_path);
      self->device_path = g_value_dup_string (value);
      break;
    case PROP_DEVICE_NAME:
      g_free (self->device_name);
      self->device_name = g_value_dup_string (value);
      break;
    case PROP_DEVICE_INDEX:
      self->device_index = g_value_get_int (value);
      break;
#if GST_MF_WINAPI_APP
    case PROP_DISPATCHER:
      self->dispatcher = g_value_get_pointer (value);
      break;
#endif
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static gboolean
gst_mf_video_src_start (GstBaseSrc * src)
{
  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);

  GST_DEBUG_OBJECT (self, "Start");

  self->source = gst_mf_source_object_new (GST_MF_SOURCE_TYPE_VIDEO,
      self->device_index, self->device_name, self->device_path, NULL);

  self->n_frames = 0;
  self->latency = 0;

  if (!self->source) {
    GST_ERROR_OBJECT (self, "Couldn't create capture object");
    return FALSE;
  }

  gst_mf_source_object_set_client (self->source, GST_ELEMENT (self));

  return TRUE;
}

static gboolean
gst_mf_video_src_stop (GstBaseSrc * src)
{
  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);

  GST_DEBUG_OBJECT (self, "Stop");

  if (self->source) {
    gst_mf_source_object_stop (self->source);
    gst_object_unref (self->source);
    self->source = NULL;
  }

  self->started = FALSE;

  return TRUE;
}

static gboolean
gst_mf_video_src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);

  GST_DEBUG_OBJECT (self, "Set caps %" GST_PTR_FORMAT, caps);

  if (!self->source) {
    GST_ERROR_OBJECT (self, "No capture engine yet");
    return FALSE;
  }

  if (!gst_mf_source_object_set_caps (self->source, caps)) {
    GST_ERROR_OBJECT (self, "CaptureEngine couldn't accept caps");
    return FALSE;
  }

  gst_video_info_from_caps (&self->info, caps);
  if (GST_VIDEO_INFO_FORMAT (&self->info) != GST_VIDEO_FORMAT_ENCODED)
    gst_base_src_set_blocksize (src, GST_VIDEO_INFO_SIZE (&self->info));

  return TRUE;
}

static GstCaps *
gst_mf_video_src_get_caps (GstBaseSrc * src, GstCaps * filter)
{
  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);
  GstCaps *caps = NULL;

  if (self->source)
    caps = gst_mf_source_object_get_caps (self->source);

  if (!caps)
    caps = gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (src));

  if (filter) {
    GstCaps *filtered =
        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = filtered;
  }

  GST_DEBUG_OBJECT (self, "Returning caps %" GST_PTR_FORMAT, caps);

  return caps;
}

static GstCaps *
gst_mf_video_src_fixate (GstBaseSrc * src, GstCaps * caps)
{
  GstStructure *structure;
  GstCaps *fixated_caps;
  gint i;

  fixated_caps = gst_caps_make_writable (caps);

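  /* Prefer the largest resolution and frame rate the device offers */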
  for (i = 0; i < gst_caps_get_size (fixated_caps); ++i) {
    structure = gst_caps_get_structure (fixated_caps, i);
    gst_structure_fixate_field_nearest_int (structure, "width", G_MAXINT);
    gst_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);
    gst_structure_fixate_field_nearest_fraction (structure, "framerate",
        G_MAXINT, 1);
  }

  fixated_caps = gst_caps_fixate (fixated_caps);

  return fixated_caps;
}

static gboolean
gst_mf_video_src_unlock (GstBaseSrc * src)
{
  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);

  if (self->source)
    gst_mf_source_object_set_flushing (self->source, TRUE);

  return TRUE;
}

static gboolean
gst_mf_video_src_unlock_stop (GstBaseSrc * src)
{
  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);

  if (self->source)
    gst_mf_source_object_set_flushing (self->source, FALSE);

  return TRUE;
}

static gboolean
gst_mf_video_src_query (GstBaseSrc * src, GstQuery * query)
{
  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (src);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:
      if (self->started) {
        gst_query_set_latency (query, TRUE, 0, self->latency);

        return TRUE;
      }
      break;
    default:
      break;
  }

  return GST_BASE_SRC_CLASS (parent_class)->query (src, query);
}

static GstFlowReturn
gst_mf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer)
{
  GstMFVideoSrc *self = GST_MF_VIDEO_SRC (pushsrc);
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *buf = NULL;
  GstClock *clock;
  GstClockTime running_time = GST_CLOCK_TIME_NONE;
  GstClockTimeDiff diff;

  if (!self->started) {
    if (!gst_mf_source_object_start (self->source)) {
      GST_ERROR_OBJECT (self, "Failed to start capture object");

      return GST_FLOW_ERROR;
    }

    self->started = TRUE;
  }

  if (GST_VIDEO_INFO_FORMAT (&self->info) != GST_VIDEO_FORMAT_ENCODED) {
    ret = GST_BASE_SRC_CLASS (parent_class)->alloc (GST_BASE_SRC (self), 0,
        GST_VIDEO_INFO_SIZE (&self->info), &buf);

    if (ret != GST_FLOW_OK)
      return ret;

    ret = gst_mf_source_object_fill (self->source, buf);
  } else {
    ret = gst_mf_source_object_create (self->source, &buf);
  }

  if (ret != GST_FLOW_OK)
    return ret;

  GST_BUFFER_OFFSET (buf) = self->n_frames;
  GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET (buf) + 1;
  self->n_frames++;

  GST_LOG_OBJECT (self,
      "Captured buffer timestamp %" GST_TIME_FORMAT ", duration %"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

  /* Update latency */
  clock = gst_element_get_clock (GST_ELEMENT_CAST (self));
  if (clock) {
    GstClockTime now;

    now = gst_clock_get_time (clock);
    running_time = now - GST_ELEMENT_CAST (self)->base_time;
    gst_object_unref (clock);
  }

  diff = GST_CLOCK_DIFF (GST_BUFFER_PTS (buf), running_time);
  if (diff > self->latency) {
    self->latency = (GstClockTime) diff;
    GST_DEBUG_OBJECT (self, "Updated latency value %" GST_TIME_FORMAT,
        GST_TIME_ARGS (self->latency));
  }

  *buffer = buf;

  return GST_FLOW_OK;
}