1 /*
2 * Copyright (C) 2021 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "gst_video_display_sink.h"
17
namespace {
// Tuning constants for AV-sync smoothing, all in nanoseconds.
constexpr guint64 DEFAULT_MAX_WAIT_CLOCK_TIME = 200000000; // ns, 200ms; cap on the render-time smoothing threshold
constexpr gint64 DEFAULT_AUDIO_RUNNING_TIME_DIFF_THD = 20000000; // ns, 20ms; audio jitter above this shifts video reach time
constexpr gint64 DEFAULT_EXTRA_RENDER_FRAME_DIFF = 20000000; // ns, 20ms; slack added on top of one frame duration
}
23
// GObject property IDs; PROP_0 is the conventional unused placeholder.
enum {
    PROP_0,
    PROP_AUDIO_SINK,
    PROP_ENABLE_KPI_AVSYNC_LOG,
};
29
// Per-instance private state; `mutex` guards audio_sink, enable_kpi_avsync_log
// and last_video_render_pts where they are accessed under lock.
struct _GstVideoDisplaySinkPrivate {
    GstElement *audio_sink;              // peer audio sink queried for AV-sync info (owned ref, may be null)
    gboolean enable_kpi_avsync_log;      // when TRUE, log audio/video pts diff on each rendered buffer
    GMutex mutex;                        // protects the fields above across property/streaming threads
    guint64 render_time_diff_threshold;  // max allowed wait ahead of the clock (ns); G_MAXUINT64 disables smoothing
    guint64 last_video_render_pts;       // pts of the previously rendered buffer, 0 after flush/startup
};
37
// Register GstVideoDisplaySink as a subclass of GstSurfaceMemSink with private data.
#define gst_video_display_sink_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE(GstVideoDisplaySink, gst_video_display_sink,
    GST_TYPE_SURFACE_MEM_SINK, G_ADD_PRIVATE(GstVideoDisplaySink));

// Element-local debug category used by the GST_* logging macros below.
GST_DEBUG_CATEGORY_STATIC(gst_video_display_sink_debug_category);
#define GST_CAT_DEFAULT gst_video_display_sink_debug_category
44
// Forward declarations for the vmethod/property implementations wired up in class_init.
static void gst_video_display_sink_dispose(GObject *obj);
static void gst_video_display_sink_finalize(GObject *obj);
static void gst_video_display_sink_set_property(GObject *object, guint prop_id, const GValue *value, GParamSpec *pspec);
static GstFlowReturn gst_video_display_sink_do_app_render(GstSurfaceMemSink *surface_sink,
    GstBuffer *buffer, bool is_preroll);
static GstClockTime gst_video_display_sink_update_reach_time(GstBaseSink *base_sink, GstClockTime reach_time);
static gboolean gst_video_display_sink_event(GstBaseSink *base_sink, GstEvent *event);
52
/*
 * Class initializer: registers element metadata, the writable "audio-sink"
 * and "enable-kpi-avsync-log" properties, and overrides the dispose/finalize,
 * do_app_render, update_reach_time and event vmethods.
 */
static void gst_video_display_sink_class_init(GstVideoDisplaySinkClass *klass)
{
    g_return_if_fail(klass != nullptr);

    GObjectClass *gobject_class = G_OBJECT_CLASS(klass);
    GstSurfaceMemSinkClass *surface_sink_class = GST_SURFACE_MEM_SINK_CLASS(klass);
    GstElementClass *element_class = GST_ELEMENT_CLASS(klass);
    GstBaseSinkClass *base_sink_class = GST_BASE_SINK_CLASS(klass);

    gst_element_class_set_static_metadata(element_class,
        "VideoDisplaySink", "Sink/Video", " Video Display sink", "OpenHarmony");

    gobject_class->dispose = gst_video_display_sink_dispose;
    gobject_class->finalize = gst_video_display_sink_finalize;
    gobject_class->set_property = gst_video_display_sink_set_property;

    // Pointer property: the peer audio sink used as the AV-sync reference.
    g_object_class_install_property(gobject_class, PROP_AUDIO_SINK,
        g_param_spec_pointer("audio-sink", "audio sink", "audio sink",
            (GParamFlags)(G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS)));

    // Boolean property: enables per-buffer KPI AV-sync trace logging.
    g_object_class_install_property(gobject_class, PROP_ENABLE_KPI_AVSYNC_LOG,
        g_param_spec_boolean("enable-kpi-avsync-log", "Enable KPI AV sync log", "Enable KPI AV sync log", FALSE,
            (GParamFlags)(G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS)));

    surface_sink_class->do_app_render = gst_video_display_sink_do_app_render;
    base_sink_class->update_reach_time = gst_video_display_sink_update_reach_time;
    base_sink_class->event = gst_video_display_sink_event;
    GST_DEBUG_CATEGORY_INIT(gst_video_display_sink_debug_category, "videodisplaysink", 0, "videodisplaysink class");
}
82
gst_video_display_sink_init(GstVideoDisplaySink * sink)83 static void gst_video_display_sink_init(GstVideoDisplaySink *sink)
84 {
85 g_return_if_fail(sink != nullptr);
86
87 auto priv = reinterpret_cast<GstVideoDisplaySinkPrivate *>(gst_video_display_sink_get_instance_private(sink));
88 g_return_if_fail(priv != nullptr);
89
90 sink->priv = priv;
91 priv->audio_sink = nullptr;
92 priv->enable_kpi_avsync_log = FALSE;
93 g_mutex_init(&priv->mutex);
94 priv->last_video_render_pts = 0;
95 priv->render_time_diff_threshold = DEFAULT_MAX_WAIT_CLOCK_TIME;
96 }
97
gst_video_display_sink_dispose(GObject * obj)98 static void gst_video_display_sink_dispose(GObject *obj)
99 {
100 g_return_if_fail(obj != nullptr);
101 GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(obj);
102 GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
103
104 g_mutex_lock(&priv->mutex);
105 if (priv->audio_sink != nullptr) {
106 gst_object_unref(priv->audio_sink);
107 priv->audio_sink = nullptr;
108 }
109 g_mutex_unlock(&priv->mutex);
110 G_OBJECT_CLASS(parent_class)->dispose(obj);
111 }
112
gst_video_display_sink_finalize(GObject * obj)113 static void gst_video_display_sink_finalize(GObject *obj)
114 {
115 g_return_if_fail(obj != nullptr);
116 GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(obj);
117 GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
118 g_return_if_fail(priv != nullptr);
119
120 g_mutex_clear(&priv->mutex);
121 G_OBJECT_CLASS(parent_class)->finalize(obj);
122 }
123
gst_video_display_sink_set_audio_sink(GstVideoDisplaySink * video_display_sink,gpointer audio_sink)124 static void gst_video_display_sink_set_audio_sink(GstVideoDisplaySink *video_display_sink, gpointer audio_sink)
125 {
126 g_return_if_fail(audio_sink != nullptr);
127
128 GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
129 g_mutex_lock(&priv->mutex);
130 if (priv->audio_sink != nullptr) {
131 GST_INFO_OBJECT(video_display_sink, "has audio sink: %s, unref it", GST_ELEMENT_NAME(priv->audio_sink));
132 gst_object_unref(priv->audio_sink);
133 }
134 priv->audio_sink = GST_ELEMENT_CAST(gst_object_ref(audio_sink));
135 GST_INFO_OBJECT(video_display_sink, "get audio sink: %s", GST_ELEMENT_NAME(priv->audio_sink));
136 g_mutex_unlock(&priv->mutex);
137 }
138
gst_video_display_sink_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)139 static void gst_video_display_sink_set_property(GObject *object, guint prop_id, const GValue *value, GParamSpec *pspec)
140 {
141 g_return_if_fail(object != nullptr);
142 g_return_if_fail(value != nullptr);
143 g_return_if_fail(pspec != nullptr);
144
145 GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(object);
146 GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
147 g_return_if_fail(priv != nullptr);
148
149 switch (prop_id) {
150 case PROP_AUDIO_SINK:
151 gst_video_display_sink_set_audio_sink(video_display_sink, g_value_get_pointer(value));
152 break;
153 case PROP_ENABLE_KPI_AVSYNC_LOG:
154 g_mutex_lock(&priv->mutex);
155 priv->enable_kpi_avsync_log = g_value_get_boolean(value);
156 g_mutex_unlock(&priv->mutex);
157 break;
158 default:
159 G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
160 break;
161 }
162 }
163
gst_video_display_sink_event(GstBaseSink * base_sink,GstEvent * event)164 static gboolean gst_video_display_sink_event(GstBaseSink *base_sink, GstEvent *event)
165 {
166 g_return_val_if_fail(event != nullptr, FALSE);
167 GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(base_sink);
168 g_return_val_if_fail(video_display_sink != nullptr, FALSE);
169 GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
170
171 switch (event->type) {
172 case GST_EVENT_FLUSH_STOP: {
173 if (priv != nullptr) {
174 g_mutex_lock(&priv->mutex);
175 priv->last_video_render_pts = 0;
176 g_mutex_unlock(&priv->mutex);
177 }
178 break;
179 }
180 default:
181 break;
182 }
183 return GST_BASE_SINK_CLASS(parent_class)->event(base_sink, event);
184 }
185
kpi_log_avsync_diff(GstVideoDisplaySink * video_display_sink,guint64 last_render_pts)186 static void kpi_log_avsync_diff(GstVideoDisplaySink *video_display_sink, guint64 last_render_pts)
187 {
188 GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
189 guint64 audio_last_render_pts = 0;
190
191 // get av sync diff time
192 g_mutex_lock(&priv->mutex);
193 if (priv->enable_kpi_avsync_log && priv->audio_sink != nullptr) {
194 g_object_get(priv->audio_sink, "last-render-pts", &audio_last_render_pts, nullptr);
195 GST_WARNING_OBJECT(video_display_sink, "KPI-TRACE: audio_last_render_pts=%" G_GUINT64_FORMAT
196 ", video_last_render_pts=%" G_GUINT64_FORMAT ", diff=%" G_GINT64_FORMAT " ms",
197 audio_last_render_pts, last_render_pts,
198 ((gint64)audio_last_render_pts - (gint64)last_render_pts) / GST_MSECOND);
199 }
200 g_mutex_unlock(&priv->mutex);
201 }
202
gst_video_display_sink_get_render_time_diff_thd(GstVideoDisplaySink * video_display_sink,GstClockTime duration)203 static void gst_video_display_sink_get_render_time_diff_thd(GstVideoDisplaySink *video_display_sink,
204 GstClockTime duration)
205 {
206 if (!GST_CLOCK_TIME_IS_VALID(duration)) {
207 return;
208 }
209 GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
210 if (priv == nullptr) {
211 return;
212 }
213
214 guint64 render_time_diff_thd = duration + DEFAULT_EXTRA_RENDER_FRAME_DIFF;
215 if (render_time_diff_thd > DEFAULT_MAX_WAIT_CLOCK_TIME) {
216 // Low framerate does not enter smoothing logic to prevent video render too fast.
217 priv->render_time_diff_threshold = G_MAXUINT64;
218 GST_DEBUG_OBJECT(video_display_sink, "render_time_diff_thd is greater than DEFAULT_MAX_WAIT_CLOCK_TIME");
219 } else if (render_time_diff_thd != priv->render_time_diff_threshold) {
220 priv->render_time_diff_threshold = render_time_diff_thd;
221 GST_INFO_OBJECT(video_display_sink,
222 "get new render_time_diff_threshold=%" G_GUINT64_FORMAT, render_time_diff_thd);
223 }
224 }
225
gst_video_display_sink_do_app_render(GstSurfaceMemSink * surface_sink,GstBuffer * buffer,bool is_preroll)226 static GstFlowReturn gst_video_display_sink_do_app_render(GstSurfaceMemSink *surface_sink,
227 GstBuffer *buffer, bool is_preroll)
228 {
229 (void)is_preroll;
230 g_return_val_if_fail(surface_sink != nullptr && buffer != nullptr, GST_FLOW_ERROR);
231 GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(surface_sink);
232
233 kpi_log_avsync_diff(video_display_sink, GST_BUFFER_PTS(buffer));
234
235 /* The value of GST_BUFFER_DURATION(buffer) is average duration, which has no reference
236 value in the variable frame rate stream, because the actual duration of each frame varies greatly.
237 It is difficult to obtain the duration of the current frame, so using the duration of the previous
238 frame does not affect perception */
239 GstClockTime last_duration = GST_BUFFER_PTS(buffer) - video_display_sink->priv->last_video_render_pts;
240 if (GST_BUFFER_PTS(buffer) <= video_display_sink->priv->last_video_render_pts ||
241 video_display_sink->priv->last_video_render_pts == 0) {
242 last_duration = GST_BUFFER_DURATION(buffer);
243 }
244
245 GST_DEBUG_OBJECT(video_display_sink, "avg duration %" G_GUINT64_FORMAT ", last_duration %" G_GUINT64_FORMAT
246 ", pts %" G_GUINT64_FORMAT, GST_BUFFER_DURATION(buffer), last_duration, GST_BUFFER_PTS(buffer));
247 video_display_sink->priv->last_video_render_pts = GST_BUFFER_PTS(buffer);
248 gst_video_display_sink_get_render_time_diff_thd(video_display_sink, last_duration);
249 return GST_FLOW_OK;
250 }
251
gst_video_display_sink_adjust_reach_time_by_jitter(GstVideoDisplaySink * video_display_sink,GstClockTime reach_time)252 static GstClockTime gst_video_display_sink_adjust_reach_time_by_jitter(GstVideoDisplaySink *video_display_sink,
253 GstClockTime reach_time)
254 {
255 GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
256 if (priv == nullptr) {
257 return reach_time;
258 }
259
260 g_mutex_lock(&priv->mutex);
261 if (priv->audio_sink != nullptr) {
262 gint64 audio_running_time_diff = 0;
263 g_object_get(priv->audio_sink, "last-running-time-diff", &audio_running_time_diff, nullptr);
264 if (audio_running_time_diff > DEFAULT_AUDIO_RUNNING_TIME_DIFF_THD) {
265 GST_LOG_OBJECT(video_display_sink, "audio_running_time_diff=%" G_GINT64_FORMAT
266 ", old reach_time=%" G_GUINT64_FORMAT ", new reach_time=%" G_GUINT64_FORMAT,
267 audio_running_time_diff, reach_time, reach_time + audio_running_time_diff);
268 reach_time += audio_running_time_diff;
269 }
270 }
271 g_mutex_unlock(&priv->mutex);
272 return reach_time;
273 }
274
gst_video_display_sink_update_reach_time(GstBaseSink * base_sink,GstClockTime reach_time)275 static GstClockTime gst_video_display_sink_update_reach_time(GstBaseSink *base_sink, GstClockTime reach_time)
276 {
277 g_return_val_if_fail(base_sink != nullptr, reach_time);
278 g_return_val_if_fail(GST_CLOCK_TIME_IS_VALID(reach_time), reach_time);
279 GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(base_sink);
280 GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
281 if (priv == nullptr || priv->render_time_diff_threshold == G_MAXUINT64) {
282 return reach_time;
283 }
284
285 // 1st: update reach_time by audio running time jitter
286 GstClockTime new_reach_time = gst_video_display_sink_adjust_reach_time_by_jitter(video_display_sink, reach_time);
287
288 // 2ed: update reach_time if the running_time_diff exceeded the threshold
289 GstClockTime base_time = gst_element_get_base_time(GST_ELEMENT(base_sink)); // get base time
290 GstClockTime cur_clock_time = gst_clock_get_time(GST_ELEMENT_CLOCK(base_sink)); // get current clock time
291 if (!GST_CLOCK_TIME_IS_VALID(base_time) || !GST_CLOCK_TIME_IS_VALID(cur_clock_time)) {
292 return new_reach_time;
293 }
294 if (cur_clock_time < base_time) {
295 return new_reach_time;
296 }
297 GstClockTime cur_running_time = cur_clock_time - base_time; // get running time
298 if (cur_running_time >= new_reach_time) {
299 return new_reach_time;
300 }
301 GstClockTime running_time_diff = new_reach_time - cur_running_time;
302 if (running_time_diff > priv->render_time_diff_threshold) {
303 new_reach_time = new_reach_time - (running_time_diff - priv->render_time_diff_threshold);
304 }
305 if (new_reach_time != reach_time) {
306 GST_LOG_OBJECT(video_display_sink,
307 "running_time_diff:%" G_GUINT64_FORMAT " old reach_time:%" G_GUINT64_FORMAT
308 " new reach_time:%" G_GUINT64_FORMAT, running_time_diff, reach_time, new_reach_time);
309 }
310 return new_reach_time;
311 }
312