/*
 * Copyright (C) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "gst_video_display_sink.h"
#include <string>
#include "param_wrapper.h"

using namespace OHOS;
namespace {
    constexpr guint64 DEFAULT_MAX_WAIT_CLOCK_TIME = 200000000; // ns, 200ms
    constexpr gint64 DEFAULT_AUDIO_RUNNING_TIME_DIFF_THD = 20000000; // ns, 20ms
    constexpr gint64 DEFAULT_EXTRA_RENDER_FRAME_DIFF = 5000000; // ns, 5ms
    constexpr gint DEFAULT_DROP_BEHIND_VIDEO_BUF_FREQUENCY = 5; // drop 1 buffer every 5 buffers at most
    constexpr gint64 DEFAULT_VIDEO_BEHIND_AUDIO_THD = 90000000; // 90ms, level B
}

enum {
    PROP_0,
    PROP_AUDIO_SINK,
    PROP_ENABLE_KPI_AVSYNC_LOG,
};

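/* Private state for AV sync: the peer audio sink, frame-drop bookkeeping, and the
 * audio/video delay values used to smooth the render reach time. */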
struct _GstVideoDisplaySinkPrivate {
    GstElement *audio_sink;
    gboolean enable_kpi_avsync_log;
    gboolean enable_drop;
    gboolean close_avsync;
    GMutex mutex;
    guint64 render_time_diff_threshold;
    guint buffer_count;
    guint64 total_video_buffer_num;
    guint64 dropped_video_buffer_num;
    guint64 last_video_render_pts;
    guint bandwidth;
    gboolean need_report_bandwidth;
    gboolean start_first_render;
    guint audio_delay_time;
    guint video_delay_time;
};

#define gst_video_display_sink_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE(GstVideoDisplaySink, gst_video_display_sink,
                        GST_TYPE_SURFACE_MEM_SINK, G_ADD_PRIVATE(GstVideoDisplaySink));

GST_DEBUG_CATEGORY_STATIC(gst_video_display_sink_debug_category);
#define GST_CAT_DEFAULT gst_video_display_sink_debug_category

static void gst_video_display_sink_dispose(GObject *obj);
static void gst_video_display_sink_finalize(GObject *obj);
static void gst_video_display_sink_set_property(GObject *object, guint prop_id, const GValue *value, GParamSpec *pspec);
static GstFlowReturn gst_video_display_sink_do_app_render(GstSurfaceMemSink *surface_sink,
    GstBuffer *buffer, bool is_preroll);
static GstClockTime gst_video_display_sink_update_reach_time(GstBaseSink *base_sink, GstClockTime reach_time,
    gboolean *need_drop_this_buffer);
static gboolean gst_video_display_sink_event(GstBaseSink *base_sink, GstEvent *event);
static GstStateChangeReturn gst_video_display_sink_change_state(GstElement *element, GstStateChange transition);
static void gst_video_display_sink_enable_drop_from_sys_param(GstVideoDisplaySink *video_display_sink);
static void gst_close_avsync_from_sys_param(GstVideoDisplaySink *video_display_sink);

static void gst_video_display_sink_class_init(GstVideoDisplaySinkClass *klass)
{
    g_return_if_fail(klass != nullptr);

    GObjectClass *gobject_class = G_OBJECT_CLASS(klass);
    GstSurfaceMemSinkClass *surface_sink_class = GST_SURFACE_MEM_SINK_CLASS(klass);
    GstElementClass *element_class = GST_ELEMENT_CLASS(klass);
    GstBaseSinkClass *base_sink_class = GST_BASE_SINK_CLASS(klass);

    gst_element_class_set_static_metadata(element_class,
        "VideoDisplaySink", "Sink/Video", "Video Display sink", "OpenHarmony");

    gobject_class->dispose = gst_video_display_sink_dispose;
    gobject_class->finalize = gst_video_display_sink_finalize;
    gobject_class->set_property = gst_video_display_sink_set_property;
    element_class->change_state = gst_video_display_sink_change_state;

    g_signal_new("bandwidth-change", G_TYPE_FROM_CLASS(klass),
        static_cast<GSignalFlags>(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION), 0, NULL,
        NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);  // 1 parameter

    g_object_class_install_property(gobject_class, PROP_AUDIO_SINK,
        g_param_spec_pointer("audio-sink", "audio sink", "audio sink",
            (GParamFlags)(G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS)));

    g_object_class_install_property(gobject_class, PROP_ENABLE_KPI_AVSYNC_LOG,
        g_param_spec_boolean("enable-kpi-avsync-log", "Enable KPI AV sync log", "Enable KPI AV sync log", FALSE,
            (GParamFlags)(G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS)));

    surface_sink_class->do_app_render = gst_video_display_sink_do_app_render;
    base_sink_class->update_reach_time = gst_video_display_sink_update_reach_time;
    base_sink_class->event = gst_video_display_sink_event;
    GST_DEBUG_CATEGORY_INIT(gst_video_display_sink_debug_category, "videodisplaysink", 0, "videodisplaysink class");
}

static void gst_video_display_sink_init(GstVideoDisplaySink *sink)
{
    g_return_if_fail(sink != nullptr);

    auto priv = reinterpret_cast<GstVideoDisplaySinkPrivate *>(gst_video_display_sink_get_instance_private(sink));
    g_return_if_fail(priv != nullptr);

    sink->priv = priv;
    priv->audio_sink = nullptr;
    priv->enable_kpi_avsync_log = FALSE;
    priv->close_avsync = FALSE;
    g_mutex_init(&priv->mutex);
    priv->render_time_diff_threshold = DEFAULT_MAX_WAIT_CLOCK_TIME;
    priv->buffer_count = 1;
    priv->bandwidth = 0;
    priv->need_report_bandwidth = FALSE;
    priv->total_video_buffer_num = 0;
    priv->dropped_video_buffer_num = 0;
    priv->last_video_render_pts = 0;
    priv->start_first_render = FALSE;
    priv->audio_delay_time = 0;
    priv->video_delay_time = 0;
    gst_video_display_sink_enable_drop_from_sys_param(sink);
    gst_close_avsync_from_sys_param(sink);
}

static void gst_video_display_sink_dispose(GObject *obj)
{
    g_return_if_fail(obj != nullptr);
    GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(obj);
    GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;

    g_mutex_lock(&priv->mutex);
    if (priv->audio_sink != nullptr) {
        gst_object_unref(priv->audio_sink);
        priv->audio_sink = nullptr;
    }
    g_mutex_unlock(&priv->mutex);
    G_OBJECT_CLASS(parent_class)->dispose(obj);
}

static void gst_video_display_sink_finalize(GObject *obj)
{
    g_return_if_fail(obj != nullptr);
    GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(obj);
    GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
    if (priv != nullptr) {
        g_mutex_clear(&priv->mutex);
    }

    G_OBJECT_CLASS(parent_class)->finalize(obj);
}

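/* Hold a reference to the peer audio sink so its last rendered PTS and running-time
 * diff can be queried for AV sync; any previously set audio sink is unreferenced. */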
static void gst_video_display_sink_set_audio_sink(GstVideoDisplaySink *video_display_sink, gpointer audio_sink)
{
    g_return_if_fail(audio_sink != nullptr);

    GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
    g_mutex_lock(&priv->mutex);
    if (priv->audio_sink != nullptr) {
        GST_INFO_OBJECT(video_display_sink, "has audio sink: %s, unref it", GST_ELEMENT_NAME(priv->audio_sink));
        gst_object_unref(priv->audio_sink);
    }
    priv->audio_sink = GST_ELEMENT_CAST(gst_object_ref(audio_sink));
    GST_INFO_OBJECT(video_display_sink, "get audio sink: %s", GST_ELEMENT_NAME(priv->audio_sink));
    g_mutex_unlock(&priv->mutex);
}

static void gst_video_display_sink_set_property(GObject *object, guint prop_id, const GValue *value, GParamSpec *pspec)
{
    g_return_if_fail(object != nullptr);
    g_return_if_fail(value != nullptr);
    g_return_if_fail(pspec != nullptr);

    GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(object);
    GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
    g_return_if_fail(priv != nullptr);

    switch (prop_id) {
        case PROP_AUDIO_SINK:
            gst_video_display_sink_set_audio_sink(video_display_sink, g_value_get_pointer(value));
            break;
        case PROP_ENABLE_KPI_AVSYNC_LOG:
            g_mutex_lock(&priv->mutex);
            priv->enable_kpi_avsync_log = g_value_get_boolean(value);
            g_mutex_unlock(&priv->mutex);
            break;
        default:
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
            break;
    }
}

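/* Watch the event stream: FLUSH_START re-arms the first-render flag, FLUSH_STOP resets
 * the drop counter and last rendered PTS, and a "bandwidth" tag schedules a
 * "bandwidth-change" signal emission on the next rendered buffer. */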
static gboolean gst_video_display_sink_event(GstBaseSink *base_sink, GstEvent *event)
{
    g_return_val_if_fail(event != nullptr, FALSE);
    GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(base_sink);
    g_return_val_if_fail(video_display_sink != nullptr, FALSE);
    GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;

    switch (event->type) {
        case GST_EVENT_FLUSH_START: {
            if (priv != nullptr) {
                g_mutex_lock(&priv->mutex);
                priv->start_first_render = TRUE;
                g_mutex_unlock(&priv->mutex);
            }
            break;
        }
        case GST_EVENT_FLUSH_STOP: {
            if (priv != nullptr) {
                g_mutex_lock(&priv->mutex);
                priv->buffer_count = 1;
                priv->last_video_render_pts = 0;
                g_mutex_unlock(&priv->mutex);
            }
            break;
        }
        case GST_EVENT_TAG: {
            GstTagList *tagList;
            gst_event_parse_tag(event, &tagList);
            guint bandwidth = 0; // stays 0 if the tag list carries no "bandwidth" entry
            gst_tag_list_get_uint(tagList, "bandwidth", &bandwidth);
            if (priv != nullptr && priv->bandwidth != bandwidth && bandwidth != 0) {
                GST_DEBUG_OBJECT(video_display_sink, "bandwidth is %u", bandwidth);
                priv->bandwidth = bandwidth;
                priv->need_report_bandwidth = TRUE;
            }
            break;
        }
        default:
            break;
    }
    return GST_BASE_SINK_CLASS(parent_class)->event(base_sink, event);
}

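/* On PAUSED->READY, log the accumulated drop statistics and reset the per-playback
 * AV sync counters so a subsequent playback starts from a clean state. */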
static GstStateChangeReturn gst_video_display_sink_change_state(GstElement *element, GstStateChange transition)
{
    g_return_val_if_fail(element != nullptr, GST_STATE_CHANGE_FAILURE);
    GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK(element);
    GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;

    switch (transition) {
        case GST_STATE_CHANGE_PAUSED_TO_READY:
            if (priv != nullptr) {
                g_mutex_lock(&priv->mutex);
                if (priv->total_video_buffer_num != 0) {
                    GST_DEBUG_OBJECT(video_display_sink, "total video buffer num:%" G_GUINT64_FORMAT
                        ", dropped video buffer num:%" G_GUINT64_FORMAT ", drop rate:%f",
                        priv->total_video_buffer_num, priv->dropped_video_buffer_num,
                        (gfloat)priv->dropped_video_buffer_num / priv->total_video_buffer_num);
                }
                priv->start_first_render = TRUE;
                priv->buffer_count = 1;
                priv->total_video_buffer_num = 0;
                priv->dropped_video_buffer_num = 0;
                priv->last_video_render_pts = 0;
                g_mutex_unlock(&priv->mutex);
            }
            break;
        default:
            break;
    }
    return GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
}

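/* Frame dropping defaults to enabled; the system parameter
 * sys.media.drop.video.buffer.enable=false turns it off. */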
static void gst_video_display_sink_enable_drop_from_sys_param(GstVideoDisplaySink *video_display_sink)
{
    std::string drop_enable;
    video_display_sink->priv->enable_drop = TRUE;
    int32_t res = OHOS::system::GetStringParameter("sys.media.drop.video.buffer.enable", drop_enable, "");
    if (res != 0 || drop_enable.empty()) {
        GST_DEBUG_OBJECT(video_display_sink, "sys.media.drop.video.buffer.enable is not set");
        return;
    }
    GST_DEBUG_OBJECT(video_display_sink, "sys.media.drop.video.buffer.enable=%s", drop_enable.c_str());

    if (drop_enable == "false") {
        video_display_sink->priv->enable_drop = FALSE;
    }
}

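/* AV sync defaults to on; the system parameter sys.media.close.avsync=true
 * disables the reach-time adjustment entirely. */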
static void gst_close_avsync_from_sys_param(GstVideoDisplaySink *video_display_sink)
{
    std::string avsync_close;
    video_display_sink->priv->close_avsync = FALSE;
    int32_t res = OHOS::system::GetStringParameter("sys.media.close.avsync", avsync_close, "");
    if (res != 0 || avsync_close.empty()) {
        GST_DEBUG_OBJECT(video_display_sink, "sys.media.close.avsync is not set");
        return;
    }
    GST_DEBUG_OBJECT(video_display_sink, "sys.media.close.avsync=%s", avsync_close.c_str());

    if (avsync_close == "true") {
        video_display_sink->priv->close_avsync = TRUE;
    }
}

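/* When KPI AV sync logging is enabled, query the audio sink's last rendered PTS and
 * log the audio/video PTS difference in milliseconds for each rendered video frame. */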
static void kpi_log_avsync_diff(GstVideoDisplaySink *video_display_sink, guint64 last_render_pts)
{
    GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
    guint64 audio_last_render_pts = 0;

    // get av sync diff time
    g_mutex_lock(&priv->mutex);
    if (priv->enable_kpi_avsync_log && priv->audio_sink != nullptr) {
        g_object_get(priv->audio_sink, "last-render-pts", &audio_last_render_pts, nullptr);
        GST_WARNING_OBJECT(video_display_sink, "KPI-TRACE: audio_last_render_pts=%" G_GUINT64_FORMAT
            ", video_last_render_pts=%" G_GUINT64_FORMAT ", diff=%" G_GINT64_FORMAT " ms",
            audio_last_render_pts, last_render_pts,
            ((gint64)audio_last_render_pts - (gint64)last_render_pts) / GST_MSECOND);
    }
    g_mutex_unlock(&priv->mutex);
}

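/* Derive the per-frame render time diff threshold from the frame duration plus a small
 * margin; frames longer than DEFAULT_MAX_WAIT_CLOCK_TIME disable the smoothing logic. */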
static void gst_video_display_sink_get_render_time_diff_thd(GstVideoDisplaySink *video_display_sink,
    GstClockTime duration)
{
    if (!GST_CLOCK_TIME_IS_VALID(duration)) {
        return;
    }
    GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
    if (priv == nullptr) {
        return;
    }

    guint64 render_time_diff_thd = duration + DEFAULT_EXTRA_RENDER_FRAME_DIFF;
    if (render_time_diff_thd > DEFAULT_MAX_WAIT_CLOCK_TIME) {
        // Low frame rates skip the smoothing logic to prevent the video from rendering too fast.
        priv->render_time_diff_threshold = G_MAXUINT64;
        GST_DEBUG_OBJECT(video_display_sink, "render_time_diff_thd is greater than DEFAULT_MAX_WAIT_CLOCK_TIME");
    } else if (render_time_diff_thd != priv->render_time_diff_threshold) {
        priv->render_time_diff_threshold = render_time_diff_thd;
        GST_INFO_OBJECT(video_display_sink,
            "get new render_time_diff_threshold=%" G_GUINT64_FORMAT, render_time_diff_thd);
    }
}

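/* Called for every buffer handed to the application surface: log the AV sync diff,
 * refresh the render time diff threshold from the estimated frame duration, and emit
 * a pending "bandwidth-change" signal if a new bandwidth tag was seen. */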
static GstFlowReturn gst_video_display_sink_do_app_render(GstSurfaceMemSink *surface_sink,
    GstBuffer *buffer, bool is_preroll)
{
    (void)is_preroll;
    g_return_val_if_fail(surface_sink != nullptr && buffer != nullptr, GST_FLOW_ERROR);
    GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(surface_sink);

    kpi_log_avsync_diff(video_display_sink, GST_BUFFER_PTS(buffer));

    /* GST_BUFFER_DURATION(buffer) is only an average duration, which is not a reliable
       reference for variable frame rate streams because the actual duration of each frame
       varies greatly. The duration of the current frame is hard to obtain, so the duration
       derived from the previous frame is used instead; this does not affect perception. */
    GstClockTime last_duration = GST_BUFFER_PTS(buffer) - video_display_sink->priv->last_video_render_pts;
    if (GST_BUFFER_PTS(buffer) <= video_display_sink->priv->last_video_render_pts ||
        video_display_sink->priv->last_video_render_pts == 0) {
        last_duration = GST_BUFFER_DURATION(buffer);
    }

    GST_DEBUG_OBJECT(video_display_sink, "avg duration %" G_GUINT64_FORMAT ", last_duration %" G_GUINT64_FORMAT
        ", pts %" G_GUINT64_FORMAT, GST_BUFFER_DURATION(buffer), last_duration, GST_BUFFER_PTS(buffer));
    video_display_sink->priv->last_video_render_pts = GST_BUFFER_PTS(buffer);
    gst_video_display_sink_get_render_time_diff_thd(video_display_sink, last_duration);
    if (video_display_sink->priv->need_report_bandwidth) {
        g_signal_emit_by_name(video_display_sink, "bandwidth-change", video_display_sink->priv->bandwidth);
        video_display_sink->priv->need_report_bandwidth = FALSE;
    }
    return GST_FLOW_OK;
}

static GstClockTime gst_video_get_current_running_time(GstBaseSink *base_sink)
{
    GstClockTime base_time = gst_element_get_base_time(GST_ELEMENT(base_sink)); // get base time
    GstClockTime cur_clock_time = gst_clock_get_time(GST_ELEMENT_CLOCK(base_sink)); // get current clock time
    if (!GST_CLOCK_TIME_IS_VALID(base_time) || !GST_CLOCK_TIME_IS_VALID(cur_clock_time)) {
        return GST_CLOCK_TIME_NONE;
    }
    if (cur_clock_time < base_time) {
        return GST_CLOCK_TIME_NONE;
    }
    return cur_clock_time - base_time;
}

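/* Compare how far video and audio are running behind their target times:
 * if video lags audio by more than video_delay_time + DEFAULT_VIDEO_BEHIND_AUDIO_THD,
 * drop at most one of every DEFAULT_DROP_BEHIND_VIDEO_BUF_FREQUENCY buffers (when
 * dropping is enabled); if audio lags video beyond DEFAULT_AUDIO_RUNNING_TIME_DIFF_THD,
 * push the video reach_time later by the difference. */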
static void gst_video_display_sink_adjust_reach_time_handle(GstVideoDisplaySink *video_display_sink,
    GstClockTime &reach_time, gint64 video_running_time_diff, gint64 audio_running_time_diff,
    gboolean *need_drop_this_buffer)
{
    GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;

    if (video_running_time_diff - audio_running_time_diff > priv->video_delay_time + DEFAULT_VIDEO_BEHIND_AUDIO_THD) {
        if (priv->enable_drop == TRUE) {
            if (priv->buffer_count % DEFAULT_DROP_BEHIND_VIDEO_BUF_FREQUENCY == 0) {
                GST_WARNING_OBJECT(video_display_sink, "drop this video buffer, num:%" G_GUINT64_FORMAT,
                    priv->total_video_buffer_num);
                *need_drop_this_buffer = TRUE;
                priv->dropped_video_buffer_num++;
            } else {
                priv->buffer_count++;
                return;
            }
        }
    } else if (video_running_time_diff < audio_running_time_diff &&
               (audio_running_time_diff - video_running_time_diff) > DEFAULT_AUDIO_RUNNING_TIME_DIFF_THD) {
        GST_INFO_OBJECT(video_display_sink, "audio is too late, adjust video reach_time, video_running_time_diff=%"
            G_GINT64_FORMAT ", audio_running_time_diff=%" G_GINT64_FORMAT ", old reach_time=%"
            G_GUINT64_FORMAT ", new reach_time=%" G_GUINT64_FORMAT,
            video_running_time_diff, audio_running_time_diff, reach_time,
            reach_time + (audio_running_time_diff - video_running_time_diff));
        // Audio lags video by more than DEFAULT_AUDIO_RUNNING_TIME_DIFF_THD; delay the video render time.
        reach_time += (audio_running_time_diff - video_running_time_diff);
    }

    if (priv->enable_drop == TRUE) {
        priv->buffer_count = 1;
    }
}

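/* First-stage adjustment: compare the video buffer's lateness against the audio sink's
 * reported "last-running-time-diff" and let the handler above drop the buffer or delay
 * reach_time accordingly; returns the (possibly adjusted) reach_time. */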
static GstClockTime gst_video_display_sink_adjust_reach_time_by_jitter(GstBaseSink *base_sink,
    GstVideoDisplaySink *video_display_sink, GstClockTime reach_time, gboolean *need_drop_this_buffer)
{
    GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
    if (priv == nullptr) {
        return reach_time;
    }

    g_mutex_lock(&priv->mutex);
    if (priv->audio_sink != nullptr) {
        gint64 audio_running_time_diff = 0;
        g_object_get(priv->audio_sink, "last-running-time-diff", &audio_running_time_diff, nullptr);
        GstClockTime cur_running_time = gst_video_get_current_running_time(base_sink);
        if (!GST_CLOCK_TIME_IS_VALID(cur_running_time)) {
            // unlock before bailing out; returning via g_return_val_if_fail here would leak the mutex
            g_mutex_unlock(&priv->mutex);
            return reach_time;
        }
        gint64 video_running_time_diff = cur_running_time - reach_time;

        GST_LOG_OBJECT(video_display_sink, "videosink buffer num:%" G_GUINT64_FORMAT ", video_running_time_diff:%"
            G_GINT64_FORMAT " = cur_running_time:%" G_GUINT64_FORMAT " - reach_time:%" G_GUINT64_FORMAT
            ", audio_running_time_diff:%" G_GINT64_FORMAT ", buffer_count:%u", priv->total_video_buffer_num,
            video_running_time_diff, cur_running_time, reach_time, audio_running_time_diff,
            priv->buffer_count);

        gst_video_display_sink_adjust_reach_time_handle(video_display_sink, reach_time,
            video_running_time_diff, audio_running_time_diff, need_drop_this_buffer);
    }
    g_mutex_unlock(&priv->mutex);
    return reach_time;
}

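/* Base-sink hook that decides when a video buffer should actually reach the clock.
 * It runs in three steps: (1) adjust reach_time (or request a drop) based on the audio
 * running-time jitter, (2) combine the audio sink's static delay with the dynamic delay
 * just computed, and (3) move video_delay_time toward audio_delay_time in small
 * DEFAULT_EXTRA_RENDER_FRAME_DIFF steps so the correction is applied smoothly. */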
static GstClockTime gst_video_display_sink_update_reach_time(GstBaseSink *base_sink, GstClockTime reach_time,
    gboolean *need_drop_this_buffer)
{
    g_return_val_if_fail(base_sink != nullptr, reach_time);
    g_return_val_if_fail(GST_CLOCK_TIME_IS_VALID(reach_time), reach_time);
    GstVideoDisplaySink *video_display_sink = GST_VIDEO_DISPLAY_SINK_CAST(base_sink);
    GstVideoDisplaySinkPrivate *priv = video_display_sink->priv;
    if (priv == nullptr || priv->render_time_diff_threshold == G_MAXUINT64 || priv->close_avsync == TRUE) {
        return reach_time;
    }
    g_return_val_if_fail(priv->audio_sink != nullptr, reach_time);

    priv->total_video_buffer_num++;

    // 1st: update reach_time by audio running time jitter
    GstClockTime new_reach_time = gst_video_display_sink_adjust_reach_time_by_jitter(base_sink, video_display_sink,
        reach_time, need_drop_this_buffer);
    g_return_val_if_fail(GST_CLOCK_TIME_IS_VALID(new_reach_time), reach_time);
    guint dynamic_delay = 0;
    if (new_reach_time > reach_time) {
        dynamic_delay = new_reach_time - reach_time;
    }

    // 2nd: update reach_time if the running_time_diff exceeded the threshold
    guint static_delay = 0;
    g_object_get(priv->audio_sink, "audio-delay-time", &static_delay, nullptr);
    priv->audio_delay_time = static_delay + dynamic_delay;
    GST_INFO_OBJECT(video_display_sink, "audio_delay_time:%u", priv->audio_delay_time);

    if (priv->start_first_render) {
        priv->video_delay_time = 0;
        priv->start_first_render = FALSE;
    }

    // 3rd: smooth transition
    if (priv->video_delay_time < priv->audio_delay_time &&
        priv->audio_delay_time - priv->video_delay_time > DEFAULT_EXTRA_RENDER_FRAME_DIFF) {
        priv->video_delay_time += DEFAULT_EXTRA_RENDER_FRAME_DIFF;
    }

    if (priv->video_delay_time > priv->audio_delay_time &&
        priv->video_delay_time - priv->audio_delay_time > DEFAULT_EXTRA_RENDER_FRAME_DIFF) {
        priv->video_delay_time -= DEFAULT_EXTRA_RENDER_FRAME_DIFF;
    }

    new_reach_time = reach_time + priv->video_delay_time;
    if (new_reach_time != reach_time) {
        GST_INFO_OBJECT(video_display_sink,
            " old reach_time:%" G_GUINT64_FORMAT
            " new reach_time:%" G_GUINT64_FORMAT
            " video delay time:%u", reach_time, new_reach_time, priv->video_delay_time);
    }
    return new_reach_time;
}