/* Copyright (C) <2018> Philippe Normand <philn@igalia.com>
 * Copyright (C) <2018> Žan Doberšek <zdobersek@igalia.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-wpevideosrc
 * @title: wpevideosrc
 *
 * The wpevideosrc element is used to produce a video texture representing a web page
 * rendered off-screen by WPE.
 *
 * Starting from WPEBackend-FDO 1.6.x, software rendering support is available. This
 * feature allows wpevideosrc to be used on machines without a GPU, and/or for testing
 * purposes. To enable it, set the `LIBGL_ALWAYS_SOFTWARE=true` environment
 * variable and make sure `video/x-raw, format=BGRA` caps are negotiated by the
 * wpevideosrc element.
 *
 * As the webview loading is usually not instantaneous, the wpevideosrc element emits
 * messages indicating the load progress, in percent. The value is an estimate
 * based on the total number of bytes expected to be received for a document,
 * including all its possible subresources and child documents. The application
 * can handle these `element` messages synchronously, for instance in order to
 * display a progress bar or other visual load indicator. The load percent value
 * is stored in the message structure as a double value named
 * `estimated-load-progress` and the structure name is `wpe-stats`.
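 *
 * For instance, the application can read the progress from the bus with a
 * sync message handler. The following is a minimal sketch; the `bus` variable
 * is assumed to be obtained from the application's pipeline:
 *
 * ```c
 * static GstBusSyncReply
 * handle_message (GstBus * bus, GstMessage * message, gpointer user_data)
 * {
 *   const GstStructure *s;
 *   gdouble progress;
 *
 *   if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
 *     s = gst_message_get_structure (message);
 *     if (gst_structure_has_name (s, "wpe-stats") &&
 *         gst_structure_get_double (s, "estimated-load-progress", &progress))
 *       g_print ("Load progress: %.0f%%\n", progress);
 *   }
 *   return GST_BUS_PASS;
 * }
 *
 * gst_bus_set_sync_handler (bus, handle_message, NULL, NULL);
 * ```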
 *
 * ## Example launch lines
 *
 * ```shell
 * gst-launch-1.0 -v wpevideosrc location="https://gstreamer.freedesktop.org" ! queue ! glimagesink
 * ```
 * Shows the GStreamer website homepage.
 *
 * ```shell
 * LIBGL_ALWAYS_SOFTWARE=true gst-launch-1.0 -v wpevideosrc num-buffers=50 location="https://gstreamer.freedesktop.org" \
 *   ! videoconvert ! pngenc ! multifilesink location=/tmp/snapshot-%05d.png
 * ```
 * Saves the first 50 video frames generated for the GStreamer website as PNG files in /tmp.
 *
 * ```shell
 * gst-play-1.0 --videosink gtkglsink wpe://https://gstreamer.freedesktop.org
 * ```
 * Shows the GStreamer website homepage as played with GstPlayer in a GTK+ window.
 *
 * ```shell
 * gst-launch-1.0 glvideomixer name=m sink_1::zorder=0 ! glimagesink wpevideosrc location="file:///tmp/asset.html" draw-background=0 \
 *   ! m. videotestsrc ! queue ! glupload ! glcolorconvert ! m.
 * ```
 * Composites WPE with a video stream in a single OpenGL scene.
 *
 * ```shell
 * gst-launch-1.0 glvideomixer name=m sink_1::zorder=0 sink_0::height=818 sink_0::width=1920 ! gtkglsink \
 *    wpevideosrc location="file:///tmp/asset.html" draw-background=0 ! m. \
 *    uridecodebin uri="http://example.com/Sintel.2010.1080p.mkv" name=d d. ! queue ! glupload ! glcolorconvert ! m.
 * ```
 * Composites WPE with a video stream; the sink_0 pad properties have to match the video dimensions.
 *
 * Since: 1.16
 */

/*
 * TODO:
 * - DMABuf support (requires changes in WPEBackend-fdo to expose DMABuf planes and fds)
 * - Custom EGLMemory allocator
 * - Better navigation events handling (would require a new GstNavigation API)
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include "gstwpevideosrc.h"
#include <gst/gl/gl.h>
#include <gst/gl/egl/gstglmemoryegl.h>
#include <gst/gl/wayland/gstgldisplay_wayland.h>
#include <gst/video/video.h>
#include <xkbcommon/xkbcommon.h>

#include "WPEThreadedView.h"

#define DEFAULT_WIDTH 1920
#define DEFAULT_HEIGHT 1080
#define DEFAULT_FPS_N 30
#define DEFAULT_FPS_D 1
#define DEFAULT_DRAW_BACKGROUND TRUE

enum
{
  PROP_0,
  PROP_LOCATION,
  PROP_DRAW_BACKGROUND
};

enum
{
  SIGNAL_CONFIGURE_WEB_VIEW,
  SIGNAL_LOAD_BYTES,
  LAST_SIGNAL
};
static guint gst_wpe_video_src_signals[LAST_SIGNAL] = { 0 };

struct _GstWpeVideoSrc
{
  GstGLBaseSrc parent;

  /* properties */
  gchar *location;
  gboolean draw_background;

  GBytes *bytes;
  gboolean gl_enabled;

  gint64 n_frames;              /* total frames sent */

  WPEView *view;

  GMutex lock;
};

#define WPE_LOCK(o) g_mutex_lock(&(o)->lock)
#define WPE_UNLOCK(o) g_mutex_unlock(&(o)->lock)

GST_DEBUG_CATEGORY_EXTERN (wpe_video_src_debug);
#define GST_CAT_DEFAULT wpe_video_src_debug

#define gst_wpe_video_src_parent_class parent_class
G_DEFINE_TYPE (GstWpeVideoSrc, gst_wpe_video_src, GST_TYPE_GL_BASE_SRC);

#define WPE_RAW_CAPS "video/x-raw, "            \
  "format = (string) BGRA, "                    \
  "width = " GST_VIDEO_SIZE_RANGE ", "          \
  "height = " GST_VIDEO_SIZE_RANGE ", "         \
  "framerate = " GST_VIDEO_FPS_RANGE ", "       \
  "pixel-aspect-ratio = (fraction)1/1"

#define WPE_GL_CAPS "video/x-raw(memory:GLMemory), "    \
  "format = (string) RGBA, "                            \
  "width = " GST_VIDEO_SIZE_RANGE ", "                  \
  "height = " GST_VIDEO_SIZE_RANGE ", "                 \
  "framerate = " GST_VIDEO_FPS_RANGE ", "               \
  "pixel-aspect-ratio = (fraction)1/1, texture-target = (string)2D"

#define WPE_VIDEO_SRC_CAPS WPE_GL_CAPS "; " WPE_RAW_CAPS
#define WPE_VIDEO_SRC_DOC_CAPS WPE_GL_CAPS "; video/x-raw, format = (string) BGRA"

static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (WPE_VIDEO_SRC_CAPS));

static GstFlowReturn
gst_wpe_video_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
    GstBuffer ** buf)
{
  GstGLBaseSrc *gl_src = GST_GL_BASE_SRC (bsrc);
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (bsrc);
  GstFlowReturn ret = GST_FLOW_ERROR;
  GstBuffer *locked_buffer;
  GstClockTime next_time;
  gint64 ts_offset = 0;

  WPE_LOCK (src);
  if (src->gl_enabled) {
    WPE_UNLOCK (src);
    return GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, create, (bsrc,
            offset, length, buf), ret);
  }

  locked_buffer = src->view->buffer ();
  if (locked_buffer == NULL) {
    WPE_UNLOCK (src);
    GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
        ("WPE View did not render a buffer"), (NULL));
    return ret;
  }
  *buf = gst_buffer_copy_deep (locked_buffer);

  g_object_get (gl_src, "timestamp-offset", &ts_offset, NULL);

  /* The following code mimics the behaviour of GLBaseSrc::fill */
  GST_BUFFER_TIMESTAMP (*buf) = ts_offset + gl_src->running_time;
  GST_BUFFER_OFFSET (*buf) = src->n_frames;
  src->n_frames++;
  GST_BUFFER_OFFSET_END (*buf) = src->n_frames;
  if (gl_src->out_info.fps_n) {
    next_time = gst_util_uint64_scale_int (src->n_frames * GST_SECOND,
        gl_src->out_info.fps_d, gl_src->out_info.fps_n);
    GST_BUFFER_DURATION (*buf) = next_time - gl_src->running_time;
  } else {
    next_time = ts_offset;
    GST_BUFFER_DURATION (*buf) = GST_CLOCK_TIME_NONE;
  }

  GST_LOG_OBJECT (src, "Created buffer from SHM %" GST_PTR_FORMAT, *buf);

  gl_src->running_time = next_time;

  ret = GST_FLOW_OK;
  WPE_UNLOCK (src);
  return ret;
}

static GQuark
_egl_image_quark (void)
{
  static GQuark quark = 0;

  if (!quark)
    quark = g_quark_from_static_string ("GstWPEEGLImage");
  return quark;
}

static gboolean
gst_wpe_video_src_fill_memory (GstGLBaseSrc * bsrc, GstGLMemory * memory)
{
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (bsrc);
  const GstGLFuncs *gl;
  guint tex_id;
  GstEGLImage *locked_image;

  if (!gst_gl_context_check_feature (GST_GL_CONTEXT (bsrc->context),
          "EGL_KHR_image_base")) {
    GST_ERROR_OBJECT (src, "EGL_KHR_image_base is not supported");
    return FALSE;
  }

  WPE_LOCK (src);

  gl = bsrc->context->gl_vtable;
  tex_id = gst_gl_memory_get_texture_id (memory);
  locked_image = src->view->image ();

  if (!locked_image) {
    WPE_UNLOCK (src);
    return TRUE;
  }

  // The EGLImage is implicitly associated with the memory we're filling, so we
  // need to ensure their life cycles are tied.
  gst_mini_object_set_qdata (GST_MINI_OBJECT_CAST (memory), _egl_image_quark (),
      gst_egl_image_ref (locked_image), (GDestroyNotify) gst_egl_image_unref);

  gl->ActiveTexture (GL_TEXTURE0 + memory->plane);
  gl->BindTexture (GL_TEXTURE_2D, tex_id);
  gl->EGLImageTargetTexture2D (GL_TEXTURE_2D,
      gst_egl_image_get_image (locked_image));
  gl->Flush ();
  WPE_UNLOCK (src);
  return TRUE;
}

static gboolean
gst_wpe_video_src_start (GstWpeVideoSrc * src)
{
  GstGLContext *context = NULL;
  GstGLDisplay *display = NULL;
  GstGLBaseSrc *base_src = GST_GL_BASE_SRC (src);
  gboolean created_view = FALSE;
  GBytes *bytes;

  GST_INFO_OBJECT (src, "Starting up");
  WPE_LOCK (src);

  if (src->gl_enabled) {
    context = base_src->context;
    display = base_src->display;
  }

  GST_DEBUG_OBJECT (src, "Will %sfill GLMemories",
      src->gl_enabled ? "" : "NOT ");

  auto & thread = WPEContextThread::singleton ();

  if (!src->view) {
    src->view = thread.createWPEView (src, context, display,
        GST_VIDEO_INFO_WIDTH (&base_src->out_info),
        GST_VIDEO_INFO_HEIGHT (&base_src->out_info));
    created_view = TRUE;
    GST_DEBUG_OBJECT (src, "created view %p", src->view);
  }

  if (!src->view) {
    WPE_UNLOCK (src);
    GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
        ("WPEBackend-FDO EGL display initialisation failed"), (NULL));
    return FALSE;
  }

  GST_OBJECT_LOCK (src);
  bytes = src->bytes;
  src->bytes = NULL;
  GST_OBJECT_UNLOCK (src);

  if (bytes != NULL) {
    src->view->loadData (bytes);
    g_bytes_unref (bytes);
  }

  if (created_view) {
    src->n_frames = 0;
  }
  WPE_UNLOCK (src);
  return TRUE;
}

static gboolean
gst_wpe_video_src_decide_allocation (GstBaseSrc * base_src, GstQuery * query)
{
  GstGLBaseSrc *gl_src = GST_GL_BASE_SRC (base_src);
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);
  GstCapsFeatures *caps_features;

  WPE_LOCK (src);
  caps_features = gst_caps_get_features (gl_src->out_caps, 0);
  if (caps_features != NULL
      && gst_caps_features_contains (caps_features,
          GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
    src->gl_enabled = TRUE;
  } else {
    src->gl_enabled = FALSE;
  }

  if (src->gl_enabled) {
    WPE_UNLOCK (src);
    return GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, decide_allocation,
        (base_src, query), FALSE);
  }
  WPE_UNLOCK (src);
  return gst_wpe_video_src_start (src);
}

static gboolean
gst_wpe_video_src_gl_start (GstGLBaseSrc * base_src)
{
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);
  return gst_wpe_video_src_start (src);
}

static void
gst_wpe_video_src_stop_unlocked (GstWpeVideoSrc * src)
{
  if (src->view) {
    GST_DEBUG_OBJECT (src, "deleting view %p", src->view);
    delete src->view;
    src->view = NULL;
  }
}

static void
gst_wpe_video_src_gl_stop (GstGLBaseSrc * base_src)
{
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);

  WPE_LOCK (src);
  gst_wpe_video_src_stop_unlocked (src);
  WPE_UNLOCK (src);
}

static gboolean
gst_wpe_video_src_stop (GstBaseSrc * base_src)
{
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);

  /* we can always call this; GstGLBaseSrc is smart enough to not crash if
   * gst_gl_base_src_gl_start() has not been called from chaining up
   * gst_wpe_video_src_decide_allocation() */
  if (!GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, stop, (base_src),
          FALSE))
    return FALSE;

  WPE_LOCK (src);

  /* if gl-enabled, gst_wpe_video_src_stop_unlocked() would have already been
   * called inside gst_wpe_video_src_gl_stop() from the base class stopping the
   * OpenGL context */
  if (!src->gl_enabled)
    gst_wpe_video_src_stop_unlocked (src);

  WPE_UNLOCK (src);
  return TRUE;
}

static GstCaps *
gst_wpe_video_src_fixate (GstBaseSrc * base_src, GstCaps * combined_caps)
{
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);
  GstStructure *structure;
  gint width, height;
  GstCaps *caps;

  /* In situations where software GL support is explicitly requested, select
   * raw caps, otherwise perform default caps negotiation. Unfortunately at
   * this point we don't know yet if a GL context will be usable or not, so we
   * can't check the element GstContext.
   */
  if (!g_strcmp0 (g_getenv ("LIBGL_ALWAYS_SOFTWARE"), "true")) {
    caps = gst_caps_from_string (WPE_RAW_CAPS);
  } else {
    caps = gst_caps_make_writable (combined_caps);
  }

  structure = gst_caps_get_structure (caps, 0);

  gst_structure_fixate_field_nearest_int (structure, "width", DEFAULT_WIDTH);
  gst_structure_fixate_field_nearest_int (structure, "height", DEFAULT_HEIGHT);

  if (gst_structure_has_field (structure, "framerate"))
    gst_structure_fixate_field_nearest_fraction (structure, "framerate",
        DEFAULT_FPS_N, DEFAULT_FPS_D);
  else
    gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, DEFAULT_FPS_N,
        DEFAULT_FPS_D, NULL);

  caps = GST_BASE_SRC_CLASS (parent_class)->fixate (base_src, caps);
  GST_INFO_OBJECT (base_src, "Fixated caps to %" GST_PTR_FORMAT, caps);

  if (src->view) {
    gst_structure_get (structure, "width", G_TYPE_INT, &width, "height",
        G_TYPE_INT, &height, NULL);
    src->view->resize (width, height);
  }
  return caps;
}

void
gst_wpe_video_src_configure_web_view (GstWpeVideoSrc * src,
    WebKitWebView * webview)
{
  GValue args[2] = { {0}, {0} };

  g_value_init (&args[0], GST_TYPE_ELEMENT);
  g_value_set_object (&args[0], src);
  g_value_init (&args[1], G_TYPE_OBJECT);
  g_value_set_object (&args[1], webview);

  g_signal_emitv (args, gst_wpe_video_src_signals[SIGNAL_CONFIGURE_WEB_VIEW], 0,
      NULL);

  g_value_unset (&args[0]);
  g_value_unset (&args[1]);
}

static void
gst_wpe_video_src_load_bytes (GstWpeVideoSrc * src, GBytes * bytes)
{
  if (src->view && GST_STATE (GST_ELEMENT_CAST (src)) > GST_STATE_NULL) {
    src->view->loadData (bytes);
  } else {
    GST_OBJECT_LOCK (src);
    if (src->bytes)
      g_bytes_unref (src->bytes);
    src->bytes = g_bytes_ref (bytes);
    GST_OBJECT_UNLOCK (src);
  }
}

static gboolean
gst_wpe_video_src_set_location (GstWpeVideoSrc * src, const gchar * location,
    GError ** error)
{
  GST_OBJECT_LOCK (src);
  g_free (src->location);
  src->location = g_strdup (location);
  GST_OBJECT_UNLOCK (src);

  if (src->view)
    src->view->loadUri (location);

  return TRUE;
}

static void
gst_wpe_video_src_set_draw_background (GstWpeVideoSrc * src,
    gboolean draw_background)
{
  GST_OBJECT_LOCK (src);
  src->draw_background = draw_background;
  GST_OBJECT_UNLOCK (src);

  if (src->view)
    src->view->setDrawBackground (draw_background);
}

static void
gst_wpe_video_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (object);

  switch (prop_id) {
    case PROP_LOCATION:
    {
      const gchar *location;

      location = g_value_get_string (value);
      if (location == NULL) {
        GST_WARNING_OBJECT (src, "location property cannot be NULL");
        return;
      }

      if (!gst_wpe_video_src_set_location (src, location, NULL)) {
        GST_WARNING_OBJECT (src, "badly formatted location");
        return;
      }
      break;
    }
    case PROP_DRAW_BACKGROUND:
      gst_wpe_video_src_set_draw_background (src, g_value_get_boolean (value));
      break;
    default:
      break;
  }
}

static void
gst_wpe_video_src_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (object);

  switch (prop_id) {
    case PROP_LOCATION:
      GST_OBJECT_LOCK (src);
      g_value_set_string (value, src->location);
      GST_OBJECT_UNLOCK (src);
      break;
    case PROP_DRAW_BACKGROUND:
      GST_OBJECT_LOCK (src);
      g_value_set_boolean (value, src->draw_background);
      GST_OBJECT_UNLOCK (src);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static gboolean
gst_wpe_video_src_event (GstBaseSrc * base_src, GstEvent * event)
{
  gboolean ret = FALSE;
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);

  if (src->view && GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION) {
    const gchar *key;
    gint button;
    gdouble x, y, delta_x, delta_y;

    GST_DEBUG_OBJECT (src, "Processing event %" GST_PTR_FORMAT, event);
    switch (gst_navigation_event_get_type (event)) {
      case GST_NAVIGATION_EVENT_KEY_PRESS:
      case GST_NAVIGATION_EVENT_KEY_RELEASE:
        if (gst_navigation_event_parse_key_event (event, &key)) {
          /* FIXME: This is wrong... The GstNavigation API should pass
             hardware-level information, not high-level keysym strings */
          uint32_t keysym =
              (uint32_t) xkb_keysym_from_name (key, XKB_KEYSYM_NO_FLAGS);
          struct wpe_input_keyboard_event wpe_event;
          wpe_event.key_code = keysym;
          wpe_event.pressed =
              gst_navigation_event_get_type (event) ==
              GST_NAVIGATION_EVENT_KEY_PRESS;
          src->view->dispatchKeyboardEvent (wpe_event);
          ret = TRUE;
        }
        break;
      case GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS:
      case GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE:
        if (gst_navigation_event_parse_mouse_button_event (event, &button, &x,
                &y)) {
          struct wpe_input_pointer_event wpe_event;
          wpe_event.time = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
          wpe_event.type = wpe_input_pointer_event_type_button;
          wpe_event.x = (int) x;
          wpe_event.y = (int) y;
          if (button == 1) {
            wpe_event.modifiers = wpe_input_pointer_modifier_button1;
          } else if (button == 2) {
            wpe_event.modifiers = wpe_input_pointer_modifier_button2;
          } else if (button == 3) {
            wpe_event.modifiers = wpe_input_pointer_modifier_button3;
          } else if (button == 4) {
            wpe_event.modifiers = wpe_input_pointer_modifier_button4;
          } else if (button == 5) {
            wpe_event.modifiers = wpe_input_pointer_modifier_button5;
          }
          wpe_event.button = button;
          wpe_event.state =
              gst_navigation_event_get_type (event) ==
              GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS;
          src->view->dispatchPointerEvent (wpe_event);
          ret = TRUE;
        }
        break;
      case GST_NAVIGATION_EVENT_MOUSE_MOVE:
        if (gst_navigation_event_parse_mouse_move_event (event, &x, &y)) {
          struct wpe_input_pointer_event wpe_event;
          wpe_event.time = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
          wpe_event.type = wpe_input_pointer_event_type_motion;
          wpe_event.x = (int) x;
          wpe_event.y = (int) y;
          src->view->dispatchPointerEvent (wpe_event);
          ret = TRUE;
        }
        break;
      case GST_NAVIGATION_EVENT_MOUSE_SCROLL:
        if (gst_navigation_event_parse_mouse_scroll_event (event, &x, &y,
                &delta_x, &delta_y)) {
          struct wpe_input_axis_event wpe_event;
          if (delta_x) {
            wpe_event.axis = 1;
            wpe_event.value = delta_x;
          } else {
            wpe_event.axis = 0;
            wpe_event.value = delta_y;
          }
          wpe_event.time = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
          wpe_event.type = wpe_input_axis_event_type_motion;
          wpe_event.x = (int) x;
          wpe_event.y = (int) y;
          src->view->dispatchAxisEvent (wpe_event);
          ret = TRUE;
        }
        break;
      default:
        break;
    }
    /* FIXME: No touch events handling support in GstNavigation */
  }

  if (!ret) {
    ret =
        GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, event, (base_src,
            event), FALSE);
  }
  return ret;
}

static void
gst_wpe_video_src_init (GstWpeVideoSrc * src)
{
  src->draw_background = DEFAULT_DRAW_BACKGROUND;

  gst_base_src_set_live (GST_BASE_SRC_CAST (src), TRUE);

  g_mutex_init (&src->lock);
}

static void
gst_wpe_video_src_finalize (GObject * object)
{
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (object);

  g_free (src->location);
  g_clear_pointer (&src->bytes, g_bytes_unref);
  g_mutex_clear (&src->lock);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_wpe_video_src_class_init (GstWpeVideoSrcClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
  GstGLBaseSrcClass *gl_base_src_class = GST_GL_BASE_SRC_CLASS (klass);
  GstBaseSrcClass *base_src_class = GST_BASE_SRC_CLASS (klass);
  GstPadTemplate *tmpl;
  GstCaps *doc_caps;

  gobject_class->set_property = gst_wpe_video_src_set_property;
  gobject_class->get_property = gst_wpe_video_src_get_property;
  gobject_class->finalize = gst_wpe_video_src_finalize;

  g_object_class_install_property (gobject_class, PROP_LOCATION,
      g_param_spec_string ("location", "location",
          "The URL to display",
          "", (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_DRAW_BACKGROUND,
      g_param_spec_boolean ("draw-background", "Draws the background",
          "Whether to draw the WebView background", DEFAULT_DRAW_BACKGROUND,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  gst_element_class_set_static_metadata (gstelement_class,
      "WPE source", "Source/Video",
      "Creates a video stream from a WPE browser",
      "Philippe Normand <philn@igalia.com>, Žan Doberšek <zdobersek@igalia.com>");

  tmpl = gst_static_pad_template_get (&src_factory);
  gst_element_class_add_pad_template (gstelement_class, tmpl);

  base_src_class->fixate = GST_DEBUG_FUNCPTR (gst_wpe_video_src_fixate);
  base_src_class->create = GST_DEBUG_FUNCPTR (gst_wpe_video_src_create);
  base_src_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_wpe_video_src_decide_allocation);
  base_src_class->stop = GST_DEBUG_FUNCPTR (gst_wpe_video_src_stop);
  base_src_class->event = GST_DEBUG_FUNCPTR (gst_wpe_video_src_event);

  gl_base_src_class->supported_gl_api =
      static_cast < GstGLAPI >
      (GST_GL_API_OPENGL | GST_GL_API_OPENGL3 | GST_GL_API_GLES2);
  gl_base_src_class->gl_start = GST_DEBUG_FUNCPTR (gst_wpe_video_src_gl_start);
  gl_base_src_class->gl_stop = GST_DEBUG_FUNCPTR (gst_wpe_video_src_gl_stop);
  gl_base_src_class->fill_gl_memory =
      GST_DEBUG_FUNCPTR (gst_wpe_video_src_fill_memory);

  doc_caps = gst_caps_from_string (WPE_VIDEO_SRC_DOC_CAPS);
  gst_pad_template_set_documentation_caps (tmpl, doc_caps);
  gst_clear_caps (&doc_caps);

  /**
   * GstWpeVideoSrc::configure-web-view:
   * @src: the object which received the signal
   * @webview: the webView
   *
   * Allows the application to configure the webView settings.
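   *
   * A minimal handler sketch; the `wpesrc` variable and the WebKitSettings
   * calls below are illustrative, any WebKitWebView API can be used here:
   *
   * ```c
   * static void
   * on_configure_web_view (GstElement * src, GObject * web_view,
   *     gpointer user_data)
   * {
   *   WebKitSettings *settings =
   *       webkit_web_view_get_settings (WEBKIT_WEB_VIEW (web_view));
   *   webkit_settings_set_enable_webaudio (settings, TRUE);
   * }
   *
   * g_signal_connect (wpesrc, "configure-web-view",
   *     G_CALLBACK (on_configure_web_view), NULL);
   * ```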
   */
  gst_wpe_video_src_signals[SIGNAL_CONFIGURE_WEB_VIEW] =
      g_signal_new ("configure-web-view", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_OBJECT);

  /**
   * GstWpeVideoSrc::load-bytes:
   * @src: the object which received the signal
   * @bytes: the GBytes data to load
   *
   * Load the specified bytes into the internal webView.
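   *
   * As this is an action signal, it can be emitted directly by the
   * application. A minimal sketch; the `wpesrc` variable and the HTML snippet
   * are illustrative:
   *
   * ```c
   * const gchar *html = "<html><body><h1>Hello WPE</h1></body></html>";
   * GBytes *bytes = g_bytes_new_static (html, strlen (html));
   *
   * g_signal_emit_by_name (wpesrc, "load-bytes", bytes);
   * g_bytes_unref (bytes);
   * ```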
   */
  gst_wpe_video_src_signals[SIGNAL_LOAD_BYTES] =
      g_signal_new_class_handler ("load-bytes", G_TYPE_FROM_CLASS (klass),
      static_cast < GSignalFlags > (G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
      G_CALLBACK (gst_wpe_video_src_load_bytes), NULL, NULL, NULL,
      G_TYPE_NONE, 1, G_TYPE_BYTES);
}
750