/*
 * WebRTC Audio Processing Elements
 *
 *  Copyright 2016 Collabora Ltd
 *    @author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301 USA
 *
 */

/**
 * SECTION:element-webrtcechoprobe
 *
 * This echo probe is to be used with the webrtcdsp element. See #webrtcdsp
 * documentation for more details.
 */
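
/* Illustrative example (a sketch, not taken from the element documentation;
 * the pulsesrc/pulsesink elements and the echo-loop arrangement are
 * assumptions):
 *
 *   gst-launch-1.0 pulsesrc ! webrtcdsp ! webrtcechoprobe ! pulsesink
 *
 * The probe sits on the playback path inside the same pipeline as webrtcdsp,
 * which locates it by name and uses the played-back audio it collects as the
 * echo cancellation reference.
 */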

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstwebrtcechoprobe.h"

#include <webrtc/modules/interface/module_common_types.h>
#include <gst/audio/audio.h>

GST_DEBUG_CATEGORY_EXTERN (webrtc_dsp_debug);
#define GST_CAT_DEFAULT (webrtc_dsp_debug)

#define MAX_ADAPTER_SIZE (1*1024*1024)

static GstStaticPadTemplate gst_webrtc_echo_probe_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw, "
        "format = (string) " GST_AUDIO_NE (S16) ", "
        "layout = (string) interleaved, "
        "rate = (int) { 48000, 32000, 16000, 8000 }, "
        "channels = (int) [1, MAX];"
        "audio/x-raw, "
        "format = (string) " GST_AUDIO_NE (F32) ", "
        "layout = (string) non-interleaved, "
        "rate = (int) { 48000, 32000, 16000, 8000 }, "
        "channels = (int) [1, MAX]")
    );

static GstStaticPadTemplate gst_webrtc_echo_probe_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw, "
        "format = (string) " GST_AUDIO_NE (S16) ", "
        "layout = (string) interleaved, "
        "rate = (int) { 48000, 32000, 16000, 8000 }, "
        "channels = (int) [1, MAX];"
        "audio/x-raw, "
        "format = (string) " GST_AUDIO_NE (F32) ", "
        "layout = (string) non-interleaved, "
        "rate = (int) { 48000, 32000, 16000, 8000 }, "
        "channels = (int) [1, MAX]")
    );

G_LOCK_DEFINE_STATIC (gst_aec_probes);
static GList *gst_aec_probes = NULL;

G_DEFINE_TYPE (GstWebrtcEchoProbe, gst_webrtc_echo_probe,
    GST_TYPE_AUDIO_FILTER);
GST_ELEMENT_REGISTER_DEFINE (webrtcechoprobe, "webrtcechoprobe",
    GST_RANK_NONE, GST_TYPE_WEBRTC_ECHO_PROBE);

static gboolean
gst_webrtc_echo_probe_setup (GstAudioFilter * filter, const GstAudioInfo * info)
{
  GstWebrtcEchoProbe *self = GST_WEBRTC_ECHO_PROBE (filter);

  GST_LOG_OBJECT (self, "setting format to %s with %i Hz and %i channels",
      info->finfo->description, info->rate, info->channels);

  GST_WEBRTC_ECHO_PROBE_LOCK (self);

  self->info = *info;
  self->interleaved = (info->layout == GST_AUDIO_LAYOUT_INTERLEAVED);

  if (!self->interleaved)
    gst_planar_audio_adapter_configure (self->padapter, info);

  /* WebRTC library works with 10ms buffers, compute once this size */
  self->period_samples = info->rate / 100;
  self->period_size = self->period_samples * info->bpf;
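  /* Worked example (illustrative numbers): at rate = 48000 this gives
   * period_samples = 480; with 2 channels of S16 (bpf = 4 bytes),
   * period_size = 1920 bytes per 10ms period. */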

  if (self->interleaved &&
      (webrtc::AudioFrame::kMaxDataSizeSamples * 2) < self->period_size)
    goto period_too_big;

  GST_WEBRTC_ECHO_PROBE_UNLOCK (self);

  return TRUE;

period_too_big:
  GST_WEBRTC_ECHO_PROBE_UNLOCK (self);
  GST_WARNING_OBJECT (self, "webrtcdsp format produces too big a period "
      "(maximum is %" G_GSIZE_FORMAT " samples and we have %u samples), "
      "reduce the number of channels or the rate.",
      webrtc::AudioFrame::kMaxDataSizeSamples, self->period_size / 2);
  return FALSE;
}

static gboolean
gst_webrtc_echo_probe_stop (GstBaseTransform * btrans)
{
  GstWebrtcEchoProbe *self = GST_WEBRTC_ECHO_PROBE (btrans);

  GST_WEBRTC_ECHO_PROBE_LOCK (self);
  gst_adapter_clear (self->adapter);
  gst_planar_audio_adapter_clear (self->padapter);
  GST_WEBRTC_ECHO_PROBE_UNLOCK (self);

  return TRUE;
}

static gboolean
gst_webrtc_echo_probe_src_event (GstBaseTransform * btrans, GstEvent * event)
{
  GstBaseTransformClass *klass;
  GstWebrtcEchoProbe *self = GST_WEBRTC_ECHO_PROBE (btrans);
  GstClockTime latency;
  GstClockTime upstream_latency = 0;
  GstQuery *query;

  klass = GST_BASE_TRANSFORM_CLASS (gst_webrtc_echo_probe_parent_class);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_LATENCY:
      gst_event_parse_latency (event, &latency);
      query = gst_query_new_latency ();

      if (gst_pad_query (btrans->srcpad, query)) {
        gst_query_parse_latency (query, NULL, &upstream_latency, NULL);

        if (!GST_CLOCK_TIME_IS_VALID (upstream_latency))
          upstream_latency = 0;
      }

      GST_WEBRTC_ECHO_PROBE_LOCK (self);
      self->latency = latency;
      self->delay = upstream_latency / GST_MSECOND;
      GST_WEBRTC_ECHO_PROBE_UNLOCK (self);
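
      /* Illustrative: if the playback chain reports 50ms of upstream latency
       * here, self->delay becomes 50 (ms); gst_webrtc_echo_probe_read () later
       * uses this value to line up the buffered playback samples with the
       * capture time it is given. */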

      GST_DEBUG_OBJECT (self, "We have a latency of %" GST_TIME_FORMAT
          " and delay of %ims", GST_TIME_ARGS (latency),
          (gint) (upstream_latency / GST_MSECOND));

      /* we own the latency query allocated above, release it */
      gst_query_unref (query);
      break;
    default:
      break;
  }

  return klass->src_event (btrans, event);
}

static GstFlowReturn
gst_webrtc_echo_probe_transform_ip (GstBaseTransform * btrans,
    GstBuffer * buffer)
{
  GstWebrtcEchoProbe *self = GST_WEBRTC_ECHO_PROBE (btrans);
  GstBuffer *newbuf = NULL;

  GST_WEBRTC_ECHO_PROBE_LOCK (self);
  newbuf = gst_buffer_copy (buffer);
  /* Moves the buffer timestamp to be in Running time */
  GST_BUFFER_PTS (newbuf) = gst_segment_to_running_time (&btrans->segment,
      GST_FORMAT_TIME, GST_BUFFER_PTS (buffer));

  if (self->interleaved) {
    gst_adapter_push (self->adapter, newbuf);

    if (gst_adapter_available (self->adapter) > MAX_ADAPTER_SIZE)
      gst_adapter_flush (self->adapter,
          gst_adapter_available (self->adapter) - MAX_ADAPTER_SIZE);
  } else {
    gsize available;

    gst_planar_audio_adapter_push (self->padapter, newbuf);
    available =
        gst_planar_audio_adapter_available (self->padapter) * self->info.bpf;
    if (available > MAX_ADAPTER_SIZE)
      gst_planar_audio_adapter_flush (self->padapter,
          (available - MAX_ADAPTER_SIZE) / self->info.bpf);
  }

  GST_WEBRTC_ECHO_PROBE_UNLOCK (self);

  return GST_FLOW_OK;
}

static void
gst_webrtc_echo_probe_finalize (GObject * object)
{
  GstWebrtcEchoProbe *self = GST_WEBRTC_ECHO_PROBE (object);

  G_LOCK (gst_aec_probes);
  gst_aec_probes = g_list_remove (gst_aec_probes, self);
  G_UNLOCK (gst_aec_probes);

  gst_object_unref (self->adapter);
  gst_object_unref (self->padapter);
  self->adapter = NULL;
  self->padapter = NULL;

  G_OBJECT_CLASS (gst_webrtc_echo_probe_parent_class)->finalize (object);
}

static void
gst_webrtc_echo_probe_init (GstWebrtcEchoProbe * self)
{
  self->adapter = gst_adapter_new ();
  self->padapter = gst_planar_audio_adapter_new ();
  gst_audio_info_init (&self->info);
  g_mutex_init (&self->lock);

  self->latency = GST_CLOCK_TIME_NONE;

  G_LOCK (gst_aec_probes);
  gst_aec_probes = g_list_prepend (gst_aec_probes, self);
  G_UNLOCK (gst_aec_probes);
}

static void
gst_webrtc_echo_probe_class_init (GstWebrtcEchoProbeClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseTransformClass *btrans_class = GST_BASE_TRANSFORM_CLASS (klass);
  GstAudioFilterClass *audiofilter_class = GST_AUDIO_FILTER_CLASS (klass);

  gobject_class->finalize = gst_webrtc_echo_probe_finalize;

  btrans_class->passthrough_on_same_caps = TRUE;
  btrans_class->src_event = GST_DEBUG_FUNCPTR (gst_webrtc_echo_probe_src_event);
  btrans_class->transform_ip =
      GST_DEBUG_FUNCPTR (gst_webrtc_echo_probe_transform_ip);
  btrans_class->stop = GST_DEBUG_FUNCPTR (gst_webrtc_echo_probe_stop);

  audiofilter_class->setup = GST_DEBUG_FUNCPTR (gst_webrtc_echo_probe_setup);

  gst_element_class_add_static_pad_template (element_class,
      &gst_webrtc_echo_probe_src_template);
  gst_element_class_add_static_pad_template (element_class,
      &gst_webrtc_echo_probe_sink_template);

  gst_element_class_set_static_metadata (element_class,
      "Acoustic Echo Canceller probe",
      "Generic/Audio",
      "Gathers playback buffers for webrtcdsp",
      "Nicolas Dufresne <nicolas.dufresne@collabora.com>");
}


GstWebrtcEchoProbe *
gst_webrtc_acquire_echo_probe (const gchar * name)
{
  GstWebrtcEchoProbe *ret = NULL;
  GList *l;

  G_LOCK (gst_aec_probes);
  for (l = gst_aec_probes; l; l = l->next) {
    GstWebrtcEchoProbe *probe = GST_WEBRTC_ECHO_PROBE (l->data);

    GST_WEBRTC_ECHO_PROBE_LOCK (probe);
    if (!probe->acquired && g_strcmp0 (GST_OBJECT_NAME (probe), name) == 0) {
      probe->acquired = TRUE;
      ret = GST_WEBRTC_ECHO_PROBE (gst_object_ref (probe));
      GST_WEBRTC_ECHO_PROBE_UNLOCK (probe);
      break;
    }
    GST_WEBRTC_ECHO_PROBE_UNLOCK (probe);
  }
  G_UNLOCK (gst_aec_probes);

  return ret;
}

void
gst_webrtc_release_echo_probe (GstWebrtcEchoProbe * probe)
{
  GST_WEBRTC_ECHO_PROBE_LOCK (probe);
  probe->acquired = FALSE;
  GST_WEBRTC_ECHO_PROBE_UNLOCK (probe);
  gst_object_unref (probe);
}
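
/* Usage sketch (illustrative only, not copied from webrtcdsp): a DSP element
 * is expected to pair these helpers, acquiring the probe by name and
 * releasing it when done:
 *
 *   GstWebrtcEchoProbe *probe;
 *
 *   probe = gst_webrtc_acquire_echo_probe ("webrtcechoprobe0");
 *   if (probe) {
 *     ... call gst_webrtc_echo_probe_read () from the capture processing ...
 *     gst_webrtc_release_echo_probe (probe);
 *   }
 *
 * The probe name "webrtcechoprobe0" is an assumed default object name.
 */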

gint
gst_webrtc_echo_probe_read (GstWebrtcEchoProbe * self, GstClockTime rec_time,
    gpointer _frame, GstBuffer ** buf)
{
  webrtc::AudioFrame * frame = (webrtc::AudioFrame *) _frame;
  GstClockTimeDiff diff;
  gsize avail, skip, offset, size;
  gint delay = -1;

  GST_WEBRTC_ECHO_PROBE_LOCK (self);

  if (!GST_CLOCK_TIME_IS_VALID (self->latency) ||
      !GST_AUDIO_INFO_IS_VALID (&self->info))
    goto done;

  if (self->interleaved)
    avail = gst_adapter_available (self->adapter) / self->info.bpf;
  else
    avail = gst_planar_audio_adapter_available (self->padapter);

  /* In delay agnostic mode, just return 10ms of data */
  if (!GST_CLOCK_TIME_IS_VALID (rec_time)) {
    if (avail < self->period_samples)
      goto done;

    size = self->period_samples;
    skip = 0;
    offset = 0;

    goto copy;
  }

  if (avail == 0) {
    diff = G_MAXINT64;
  } else {
    GstClockTime play_time;
    guint64 distance;

    if (self->interleaved) {
      play_time = gst_adapter_prev_pts (self->adapter, &distance);
      distance /= self->info.bpf;
    } else {
      play_time = gst_planar_audio_adapter_prev_pts (self->padapter, &distance);
    }

    if (GST_CLOCK_TIME_IS_VALID (play_time)) {
      play_time += gst_util_uint64_scale_int (distance, GST_SECOND,
          self->info.rate);
      play_time += self->latency;

      diff = GST_CLOCK_DIFF (rec_time, play_time) / GST_MSECOND;
    } else {
      /* We have no timestamp, assume perfect delay */
      diff = self->delay;
    }
  }

  if (diff > self->delay) {
    skip = (diff - self->delay) * self->info.rate / 1000;
    skip = MIN (self->period_samples, skip);
    offset = 0;
  } else {
    skip = 0;
    offset = (self->delay - diff) * self->info.rate / 1000;
    offset = MIN (avail, offset);
  }

  size = MIN (avail - offset, self->period_samples - skip);
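
  /* Worked example (assumed numbers): at 48000 Hz with self->delay = 10 and
   * diff = 15, skip = (15 - 10) * 48000 / 1000 = 240 samples of leading
   * silence in the output period; with diff = 5 instead, offset = 240 and the
   * oldest 240 samples in the adapter are passed over before copying. */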

copy:
  if (self->interleaved) {
    skip *= self->info.bpf;
    offset *= self->info.bpf;
    size *= self->info.bpf;

    if (size < self->period_size)
      memset (frame->data_, 0, self->period_size);

    if (size) {
      gst_adapter_copy (self->adapter, (guint8 *) frame->data_ + skip,
          offset, size);
      gst_adapter_flush (self->adapter, offset + size);
    }
  } else {
    GstBuffer *ret, *taken, *tmp;

    if (size) {
      gst_planar_audio_adapter_flush (self->padapter, offset);

      /* we need to fill silence at the beginning and/or the end of each
       * channel plane in order to have exactly period_samples in the buffer */
      if (size < self->period_samples) {
        GstAudioMeta *meta;
        gint bps = self->info.finfo->width / 8;
        gsize padding = self->period_samples - (skip + size);
        gint c;

        taken = gst_planar_audio_adapter_take_buffer (self->padapter, size,
            GST_MAP_READ);
        meta = gst_buffer_get_audio_meta (taken);
        ret = gst_buffer_new ();

        for (c = 0; c < meta->info.channels; c++) {
          /* need some silence at the beginning */
          if (skip) {
            tmp = gst_buffer_new_allocate (NULL, skip * bps, NULL);
            gst_buffer_memset (tmp, 0, 0, skip * bps);
            ret = gst_buffer_append (ret, tmp);
          }

          tmp = gst_buffer_copy_region (taken, GST_BUFFER_COPY_MEMORY,
              meta->offsets[c], size * bps);
          ret = gst_buffer_append (ret, tmp);

          /* need some silence at the end */
          if (padding) {
            tmp = gst_buffer_new_allocate (NULL, padding * bps, NULL);
            gst_buffer_memset (tmp, 0, 0, padding * bps);
            ret = gst_buffer_append (ret, tmp);
          }
        }

        gst_buffer_unref (taken);
        gst_buffer_add_audio_meta (ret, &self->info, self->period_samples,
            NULL);
      } else {
        ret = gst_planar_audio_adapter_take_buffer (self->padapter, size,
          GST_MAP_READWRITE);
      }
    } else {
      ret = gst_buffer_new_allocate (NULL, self->period_size, NULL);
      gst_buffer_memset (ret, 0, 0, self->period_size);
      gst_buffer_add_audio_meta (ret, &self->info, self->period_samples,
          NULL);
    }

    *buf = ret;
  }

  frame->num_channels_ = self->info.channels;
  frame->sample_rate_hz_ = self->info.rate;
  frame->samples_per_channel_ = self->period_samples;

  delay = self->delay;

done:
  GST_WEBRTC_ECHO_PROBE_UNLOCK (self);

  return delay;
}