/* Copyright (C) <2018, 2019> Philippe Normand <philn@igalia.com>
 * Copyright (C) <2018, 2019> Žan Doberšek <zdobersek@igalia.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-wpesrc
 * @title: wpesrc
 *
 * The wpesrc element is used to produce a video texture representing a web page
 * rendered off-screen by WPE.
 *
 * Starting from WPEBackend-FDO 1.6.x, software rendering support is available.
 * This feature allows wpesrc to be used on machines without a GPU, and/or for
 * testing purposes. To enable it, set the `LIBGL_ALWAYS_SOFTWARE=true`
 * environment variable and make sure `video/x-raw, format=BGRA` caps are
 * negotiated by the wpesrc element.
 *
 * ## Example launch lines
 *
 * ### Show the GStreamer website homepage
 *
 * ```
 * gst-launch-1.0 -v wpesrc location="https://gstreamer.freedesktop.org" ! queue ! glimagesink
 * ```
 *
 * ### Save the first 50 video frames generated for the GStreamer website as PNG files in /tmp
 *
 * ```
 * LIBGL_ALWAYS_SOFTWARE=true gst-launch-1.0 -v wpesrc num-buffers=50 location="https://gstreamer.freedesktop.org" ! videoconvert ! pngenc ! multifilesink location=/tmp/snapshot-%05d.png
 * ```
 *
 * ### Show the GStreamer website homepage as played with GstPlayer in a GTK+ window
 *
 * ```
 * gst-play-1.0 --videosink gtkglsink wpe://https://gstreamer.freedesktop.org
 * ```
 *
 * The `web://` URI protocol is also supported, as an alias to `wpe://`. Since: 1.20
 *
 * ### Composite WPE with a video stream in a single OpenGL scene
 *
 * ```
 * gst-launch-1.0 glvideomixer name=m sink_1::zorder=0 ! glimagesink wpesrc location="file:///home/phil/Downloads/plunk/index.html" draw-background=0 ! m. videotestsrc ! queue ! glupload ! glcolorconvert ! m.
 * ```
 *
 * ### Composite WPE with a video stream; the sink_0 pad properties have to match the video dimensions
 *
 * ```
 * gst-launch-1.0 glvideomixer name=m sink_1::zorder=0 sink_0::height=818 sink_0::width=1920 ! gtkglsink wpesrc location="file:///home/phil/Downloads/plunk/index.html" draw-background=0 ! m. uridecodebin uri="http://192.168.1.44/Sintel.2010.1080p.mkv" name=d d. ! queue ! glupload ! glcolorconvert ! m.
 * ```
 *
 * Additionally, any audio streams created by WPE are exposed as "sometimes"
 * audio source pads, as illustrated in the sketch below.
 *
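 * Because these pads appear dynamically, an application typically hooks them up
 * from a "pad-added" handler. A minimal sketch, assuming an `audio_sink`
 * element that already sits in the same pipeline (the handler name and sink
 * are illustrative, not part of the element API):
 *
 * ```
 * static void
 * on_pad_added (GstElement * wpesrc, GstPad * pad, GstElement * audio_sink)
 * {
 *   gchar *name = gst_pad_get_name (pad);
 *
 *   if (g_str_has_prefix (name, "audio_")) {
 *     GstPad *sinkpad = gst_element_get_static_pad (audio_sink, "sink");
 *
 *     gst_pad_link (pad, sinkpad);
 *     gst_object_unref (sinkpad);
 *   }
 *   g_free (name);
 * }
 * ```
 *
 * connected with `g_signal_connect (wpesrc, "pad-added", G_CALLBACK (on_pad_added), audio_sink);`.
 *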
 * This source also relays GStreamer bus messages from the GStreamer pipelines
 * running inside the web pages as [element custom](gst_message_new_custom)
 * messages whose structure is called `WpeForwarded` and has the following
 * fields:
 *
 * * `message`: The original #GstMessage
 * * `wpesrc-original-src-name`: Name of the original element posting the
 *   message
 * * `wpesrc-original-src-type`: Name of the GType of the original element
 *   posting the message
 * * `wpesrc-original-src-path`: [Path](gst_object_get_path_string) of the
 *   original element posting the message
 *
 * Note: This feature will be disabled if you disable the tracer subsystem.
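 *
 * A bus watch on the application pipeline could unwrap these forwarded
 * messages roughly as follows (a minimal sketch; the callback name is
 * illustrative):
 *
 * ```
 * static gboolean
 * on_bus_message (GstBus * bus, GstMessage * message, gpointer user_data)
 * {
 *   const GstStructure *s = gst_message_get_structure (message);
 *
 *   if (s && gst_structure_has_name (s, "WpeForwarded")) {
 *     GstMessage *original = NULL;
 *     const gchar *name = gst_structure_get_string (s, "wpesrc-original-src-name");
 *
 *     if (gst_structure_get (s, "message", GST_TYPE_MESSAGE, &original, NULL)) {
 *       gst_println ("Forwarded %s from %s", GST_MESSAGE_TYPE_NAME (original), name);
 *       gst_message_unref (original);
 *     }
 *   }
 *   return TRUE;
 * }
 * ```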
 */

#include "gstwpesrcbin.h"
#include "gstwpevideosrc.h"
#include "gstwpe.h"
#include "WPEThreadedView.h"

#include <gst/allocators/allocators.h>
#include <gst/base/gstflowcombiner.h>
#include <wpe/extensions/audio.h>

#include <sys/mman.h>
#include <unistd.h>

G_DEFINE_TYPE (GstWpeAudioPad, gst_wpe_audio_pad, GST_TYPE_GHOST_PAD);

static void
gst_wpe_audio_pad_class_init (GstWpeAudioPadClass * klass)
{
}

static void
gst_wpe_audio_pad_init (GstWpeAudioPad * pad)
{
  gst_audio_info_init (&pad->info);
  pad->discont_pending = FALSE;
  pad->buffer_time = 0;
}

static GstWpeAudioPad *
gst_wpe_audio_pad_new (const gchar * name)
{
  GstWpeAudioPad *pad = GST_WPE_AUDIO_PAD (g_object_new (gst_wpe_audio_pad_get_type (),
    "name", name, "direction", GST_PAD_SRC, NULL));

  if (!gst_ghost_pad_construct (GST_GHOST_PAD (pad))) {
    gst_object_unref (pad);
    return NULL;
  }

  return pad;
}

struct _GstWpeSrc
{
  GstBin parent;

  GstAllocator *fd_allocator;
  GstElement *video_src;
  GHashTable *audio_src_pads;
  GstFlowCombiner *flow_combiner;
  gchar *uri;
};

enum
{
 PROP_0,
 PROP_LOCATION,
 PROP_DRAW_BACKGROUND
};

enum
{
 SIGNAL_LOAD_BYTES,
 LAST_SIGNAL
};

static guint gst_wpe_video_src_signals[LAST_SIGNAL] = { 0 };

static void gst_wpe_src_uri_handler_init (gpointer iface, gpointer data);

GST_DEBUG_CATEGORY_EXTERN (wpe_src_debug);
#define GST_CAT_DEFAULT wpe_src_debug

#define gst_wpe_src_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstWpeSrc, gst_wpe_src, GST_TYPE_BIN,
    G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_wpe_src_uri_handler_init));

/**
 * GstWpeSrc!video
 *
 * Since: 1.20
 */
static GstStaticPadTemplate video_src_factory =
GST_STATIC_PAD_TEMPLATE ("video", GST_PAD_SRC, GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw(memory:GLMemory), "
                     "format = (string) RGBA, "
                     "width = " GST_VIDEO_SIZE_RANGE ", "
                     "height = " GST_VIDEO_SIZE_RANGE ", "
                     "framerate = " GST_VIDEO_FPS_RANGE ", "
                     "pixel-aspect-ratio = (fraction)1/1,"
                     "texture-target = (string)2D"
                     "; video/x-raw, "
                     "format = (string) BGRA, "
                     "width = " GST_VIDEO_SIZE_RANGE ", "
                     "height = " GST_VIDEO_SIZE_RANGE ", "
                     "framerate = " GST_VIDEO_FPS_RANGE ", "
                     "pixel-aspect-ratio = (fraction)1/1"
                     ));

/**
 * GstWpeSrc!audio_%u
 *
 * Each audio stream in the rendered web page is exposed as an `audio_%u`
 * #GstPad.
 *
 * Since: 1.20
 */
static GstStaticPadTemplate audio_src_factory =
GST_STATIC_PAD_TEMPLATE ("audio_%u", GST_PAD_SRC, GST_PAD_SOMETIMES,
    GST_STATIC_CAPS ( \
        GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (F32)) ", layout=(string)interleaved; " \
        GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (F64)) ", layout=(string)interleaved; " \
        GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (S16)) ", layout=(string)interleaved" \
));

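/* Chain function installed on the ghost pad's internal proxy pad: run the
 * default chain and feed the result into the flow combiner so the bin reports
 * a single, aggregated flow return for all of its source pads. */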
static GstFlowReturn
gst_wpe_src_chain_buffer (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstWpeSrc *src = GST_WPE_SRC (gst_object_get_parent (parent));
  GstFlowReturn result, chain_result;

  chain_result = gst_proxy_pad_chain_default (pad, GST_OBJECT_CAST (src), buffer);
  result = gst_flow_combiner_update_pad_flow (src->flow_combiner, pad, chain_result);
  gst_object_unref (src);

  if (result == GST_FLOW_FLUSHING)
    return chain_result;

  return result;
}

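/* Called when the web page starts a new audio stream: create and expose the
 * corresponding audio_%u pad and push stream-start, caps and segment events
 * before any buffers. */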
void
gst_wpe_src_new_audio_stream (GstWpeSrc *src, guint32 id, GstCaps *caps, const gchar *stream_id)
{
  GstWpeAudioPad *audio_pad;
  GstPad *pad;
  gchar *name;
  GstEvent *stream_start;
  GstSegment segment;

  name = g_strdup_printf ("audio_%u", id);
  audio_pad = gst_wpe_audio_pad_new (name);
  pad = GST_PAD_CAST (audio_pad);
  g_free (name);

  gst_pad_set_active (pad, TRUE);
  gst_element_add_pad (GST_ELEMENT_CAST (src), pad);
  gst_flow_combiner_add_pad (src->flow_combiner, pad);

  GST_DEBUG_OBJECT (src, "Adding pad: %" GST_PTR_FORMAT, pad);

  stream_start = gst_event_new_stream_start (stream_id);
  gst_pad_push_event (pad, stream_start);

  gst_audio_info_from_caps (&audio_pad->info, caps);
  gst_pad_push_event (pad, gst_event_new_caps (caps));

  gst_segment_init (&segment, GST_FORMAT_TIME);
  gst_pad_push_event (pad, gst_event_new_segment (&segment));

  g_hash_table_insert (src->audio_src_pads, GUINT_TO_POINTER (id), audio_pad);
}

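/* Receive the file descriptor of the shared-memory area used to transfer the
 * audio samples of the given stream and keep a duplicate of it on the
 * matching audio pad. */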
void
gst_wpe_src_set_audio_shm (GstWpeSrc* src, GUnixFDList *fds, guint32 id)
{
  gint fd;
  GstWpeAudioPad *audio_pad = GST_WPE_AUDIO_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id)));

  g_return_if_fail (GST_IS_WPE_SRC (src));
  g_return_if_fail (fds);
  g_return_if_fail (g_unix_fd_list_get_length (fds) == 1);
  g_return_if_fail (audio_pad->fd <= 0);

  fd = g_unix_fd_list_get (fds, 0, NULL);
  g_return_if_fail (fd >= 0);

  if (audio_pad->fd > 0)
    close(audio_pad->fd);

  audio_pad->fd = dup(fd);
}

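/* Map the shared memory for the given stream, copy the samples into a
 * timestamped GstBuffer and push it on the corresponding audio pad, updating
 * the flow combiner with the result. */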
void
gst_wpe_src_push_audio_buffer (GstWpeSrc* src, guint32 id, guint64 size)
{
  GstWpeAudioPad *audio_pad = GST_WPE_AUDIO_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id)));
  GstBuffer *buffer;

  g_return_if_fail (audio_pad->fd > 0);

  GST_TRACE_OBJECT (audio_pad, "Handling incoming audio packet");

  gpointer data = mmap (0, size, PROT_READ, MAP_PRIVATE, audio_pad->fd, 0);
  buffer = gst_buffer_new_memdup (data, size);
  munmap (data, size);
  gst_buffer_add_audio_meta (buffer, &audio_pad->info, size, NULL);

  audio_pad->buffer_time = gst_element_get_current_running_time (GST_ELEMENT (src));
  GST_BUFFER_DTS (buffer) = audio_pad->buffer_time;
  GST_BUFFER_PTS (buffer) = audio_pad->buffer_time;

  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  if (audio_pad->discont_pending) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    audio_pad->discont_pending = FALSE;
  }

  gst_flow_combiner_update_pad_flow (src->flow_combiner, GST_PAD (audio_pad),
    gst_pad_push (GST_PAD_CAST (audio_pad), buffer));
}

static void
gst_wpe_src_remove_audio_pad (GstWpeSrc *src, GstPad *pad)
{
  GST_DEBUG_OBJECT (src, "Removing pad: %" GST_PTR_FORMAT, pad);
  gst_element_remove_pad (GST_ELEMENT_CAST (src), pad);
  gst_flow_combiner_remove_pad (src->flow_combiner, pad);
}

void
gst_wpe_src_stop_audio_stream (GstWpeSrc* src, guint32 id)
{
  GstPad *pad = GST_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id)));
  g_return_if_fail (GST_IS_PAD (pad));

  GST_INFO_OBJECT (pad, "Stopping");
  gst_pad_push_event (pad, gst_event_new_eos ());
  gst_wpe_src_remove_audio_pad (src, pad);
  g_hash_table_remove (src->audio_src_pads, GUINT_TO_POINTER (id));
}

void
gst_wpe_src_pause_audio_stream (GstWpeSrc* src, guint32 id)
{
  GstWpeAudioPad *audio_pad = GST_WPE_AUDIO_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id)));
  GstPad *pad = GST_PAD_CAST (audio_pad);
  g_return_if_fail (GST_IS_PAD (pad));

  GST_INFO_OBJECT (pad, "Pausing");
  gst_pad_push_event (pad, gst_event_new_gap (audio_pad->buffer_time, GST_CLOCK_TIME_NONE));

  audio_pad->discont_pending = TRUE;
}

static void
gst_wpe_src_load_bytes (GstWpeVideoSrc * src, GBytes * bytes)
{
  GstWpeSrc *self = GST_WPE_SRC (src);

  if (self->video_src)
    g_signal_emit_by_name (self->video_src, "load-bytes", bytes, NULL);
}

static void
gst_wpe_src_set_location (GstWpeSrc * src, const gchar * location)
{
  g_object_set (src->video_src, "location", location, NULL);
}

static void
gst_wpe_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstWpeSrc *self = GST_WPE_SRC (object);

  if (self->video_src)
    g_object_get_property (G_OBJECT (self->video_src), pspec->name, value);
}

static void
gst_wpe_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstWpeSrc *self = GST_WPE_SRC (object);

  if (self->video_src) {
    if (prop_id == PROP_LOCATION)
      gst_wpe_src_set_location (self, g_value_get_string (value));
    else
      g_object_set_property (G_OBJECT (self->video_src), pspec->name, value);
  }
}

static GstURIType
gst_wpe_src_uri_get_type (GType)
{
  return GST_URI_SRC;
}

static const gchar *const *
gst_wpe_src_get_protocols (GType)
{
  static const char *protocols[] = { "wpe", "web", NULL };
  return protocols;
}

static gchar *
gst_wpe_src_get_uri (GstURIHandler * handler)
{
  GstWpeSrc *src = GST_WPE_SRC (handler);

  return g_strdup (src->uri);
}

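/* Both the wpe:// and web:// prefixes are 6 characters long; strip them and
 * forward the remainder as the location of the internal wpevideosrc. */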
static gboolean
gst_wpe_src_set_uri (GstURIHandler * handler, const gchar * uri,
    GError ** error)
{
  GstWpeSrc *src = GST_WPE_SRC (handler);

  if (src->uri) {
    g_free (src->uri);
  }
  src->uri = g_strdup (uri);
  gst_wpe_src_set_location (src, uri + 6);
  return TRUE;
}

static void
gst_wpe_src_uri_handler_init (gpointer iface_ptr, gpointer data)
{
  GstURIHandlerInterface *iface = (GstURIHandlerInterface *) iface_ptr;

  iface->get_type = gst_wpe_src_uri_get_type;
  iface->get_protocols = gst_wpe_src_get_protocols;
  iface->get_uri = gst_wpe_src_get_uri;
  iface->set_uri = gst_wpe_src_set_uri;
}

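/* Instantiate the internal wpevideosrc element and expose its src pad as the
 * "video" ghost pad, with a custom chain function on the internal proxy pad
 * so that video flow returns also go through the flow combiner. */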
static void
gst_wpe_src_init (GstWpeSrc * src)
{
  GstPad *pad;
  GstPad *ghost_pad;
  GstProxyPad *proxy_pad;

  gst_bin_set_suppressed_flags (GST_BIN_CAST (src),
      static_cast<GstElementFlags>(GST_ELEMENT_FLAG_SOURCE | GST_ELEMENT_FLAG_SINK));
  GST_OBJECT_FLAG_SET (src, GST_ELEMENT_FLAG_SOURCE);

  src->fd_allocator = gst_fd_allocator_new ();
  src->audio_src_pads = g_hash_table_new (g_direct_hash, g_direct_equal);
  src->flow_combiner = gst_flow_combiner_new ();
  src->video_src = gst_element_factory_make ("wpevideosrc", NULL);

  gst_bin_add (GST_BIN_CAST (src), src->video_src);

  pad = gst_element_get_static_pad (GST_ELEMENT_CAST (src->video_src), "src");
  ghost_pad = gst_ghost_pad_new_from_template ("video", pad,
    gst_static_pad_template_get (&video_src_factory));
  proxy_pad = gst_proxy_pad_get_internal (GST_PROXY_PAD (ghost_pad));
  gst_pad_set_active (GST_PAD_CAST (proxy_pad), TRUE);

  gst_element_add_pad (GST_ELEMENT_CAST (src), GST_PAD_CAST (ghost_pad));
  gst_flow_combiner_add_pad (src->flow_combiner, GST_PAD_CAST (ghost_pad));
  gst_pad_set_chain_function (GST_PAD_CAST (proxy_pad), gst_wpe_src_chain_buffer);

  gst_object_unref (proxy_pad);
  gst_object_unref (pad);
}

static gboolean
gst_wpe_audio_remove_audio_pad (gint32 *id, GstPad *pad, GstWpeSrc *self)
{
  gst_wpe_src_remove_audio_pad (self, pad);

  return TRUE;
}

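/* On PAUSED_TO_READY, tear down all dynamically added audio pads and reset
 * the flow combiner. */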
static GstStateChangeReturn
gst_wpe_src_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn result;
  GstWpeSrc *src = GST_WPE_SRC (element);

  GST_DEBUG_OBJECT (src, "%s", gst_state_change_get_name (transition));
  result = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state, (element, transition), GST_STATE_CHANGE_FAILURE);

  switch (transition) {
  case GST_STATE_CHANGE_PAUSED_TO_READY:{
    g_hash_table_foreach_remove (src->audio_src_pads, (GHRFunc) gst_wpe_audio_remove_audio_pad, src);
    gst_flow_combiner_reset (src->flow_combiner);
    break;
  }
  default:
    break;
  }

  return result;
}

static void
gst_wpe_src_finalize (GObject *object)
{
    GstWpeSrc *src = GST_WPE_SRC (object);

    g_hash_table_unref (src->audio_src_pads);
    gst_flow_combiner_free (src->flow_combiner);
    gst_object_unref (src->fd_allocator);
    g_free (src->uri);

    GST_CALL_PARENT (G_OBJECT_CLASS, finalize, (object));
}

static void
gst_wpe_src_class_init (GstWpeSrcClass * klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);

  gobject_class->set_property = gst_wpe_src_set_property;
  gobject_class->get_property = gst_wpe_src_get_property;
  gobject_class->finalize = gst_wpe_src_finalize;

  g_object_class_install_property (gobject_class, PROP_LOCATION,
      g_param_spec_string ("location", "location", "The URL to display", "",
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_DRAW_BACKGROUND,
      g_param_spec_boolean ("draw-background", "Draws the background",
          "Whether to draw the WebView background", TRUE,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  gst_element_class_set_static_metadata (element_class, "WPE source",
      "Source/Video/Audio", "Creates Audio/Video streams from a web"
      " page using WPE web engine",
      "Philippe Normand <philn@igalia.com>, Žan Doberšek "
      "<zdobersek@igalia.com>");

  /**
   * GstWpeSrc::load-bytes:
   * @src: the object which received the signal
   * @bytes: the GBytes data to load
   *
   * Load the specified bytes into the internal webView.
   */
  gst_wpe_video_src_signals[SIGNAL_LOAD_BYTES] =
      g_signal_new_class_handler ("load-bytes", G_TYPE_FROM_CLASS (klass),
      static_cast < GSignalFlags > (G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
      G_CALLBACK (gst_wpe_src_load_bytes), NULL, NULL, NULL, G_TYPE_NONE, 1,
      G_TYPE_BYTES);

  element_class->change_state = GST_DEBUG_FUNCPTR (gst_wpe_src_change_state);

  gst_element_class_add_static_pad_template (element_class, &video_src_factory);
  gst_element_class_add_static_pad_template (element_class, &audio_src_factory);
}
534