// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webrtc_local_audio_renderer.h"

#include "base/debug/trace_event.h"
#include "base/logging.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/synchronization/lock.h"
#include "content/renderer/media/audio_device_factory.h"
#include "content/renderer/media/media_stream_dispatcher.h"
#include "content/renderer/media/webrtc_audio_capturer.h"
#include "content/renderer/render_frame_impl.h"
#include "media/audio/audio_output_device.h"
#include "media/base/audio_block_fifo.h"
#include "media/base/audio_bus.h"

namespace content {

namespace {

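// States of the local renderer sink, recorded in the
// "Media.LocalRendererSinkStates" UMA histogram below. New values should be
// added before kSinkStatesMax.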
enum LocalRendererSinkStates {
  kSinkStarted = 0,
  kSinkNeverStarted,
  kSinkStatesMax  // Must always be last!
};

}  // namespace

// media::AudioRendererSink::RenderCallback implementation
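// Render() runs on the audio thread owned by |sink_| and drains the loopback
// FIFO that OnData() fills on the capture thread; the shared state is guarded
// by |thread_lock_|.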
int WebRtcLocalAudioRenderer::Render(
    media::AudioBus* audio_bus, int audio_delay_milliseconds) {
  TRACE_EVENT0("audio", "WebRtcLocalAudioRenderer::Render");
  base::AutoLock auto_lock(thread_lock_);

  if (!playing_ || !volume_ || !loopback_fifo_) {
    audio_bus->Zero();
    return 0;
  }

  // Provide data by reading from the FIFO if the FIFO contains enough
  // to fulfill the request.
  if (loopback_fifo_->available_blocks()) {
    const media::AudioBus* audio_data = loopback_fifo_->Consume();
    DCHECK_EQ(audio_data->frames(), audio_bus->frames());
    audio_data->CopyTo(audio_bus);
  } else {
    audio_bus->Zero();
    // Running dry is harmless if it happens for the first few audio frames,
    // but it should not happen once rendering has reached steady state.
    DVLOG(2) << "loopback FIFO is empty";
  }

  return audio_bus->frames();
}

void WebRtcLocalAudioRenderer::OnRenderError() {
  NOTIMPLEMENTED();
}

// content::MediaStreamAudioSink implementation
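// OnData() is invoked on the capture thread with interleaved int16 samples
// and pushes them into the loopback FIFO for Render() to consume; if the FIFO
// is full, the incoming data is dropped.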
void WebRtcLocalAudioRenderer::OnData(const int16* audio_data,
                                      int sample_rate,
                                      int number_of_channels,
                                      int number_of_frames) {
  DCHECK(capture_thread_checker_.CalledOnValidThread());
  TRACE_EVENT0("audio", "WebRtcLocalAudioRenderer::CaptureData");
  base::AutoLock auto_lock(thread_lock_);
  if (!playing_ || !volume_ || !loopback_fifo_)
    return;

  // Push captured audio to FIFO so it can be read by a local sink.
  if (loopback_fifo_->GetUnfilledFrames() >= number_of_frames) {
    loopback_fifo_->Push(audio_data, number_of_frames, sizeof(audio_data[0]));

    const base::TimeTicks now = base::TimeTicks::Now();
    total_render_time_ += now - last_render_time_;
    last_render_time_ = now;
  } else {
    DVLOG(1) << "FIFO is full";
  }
}

void WebRtcLocalAudioRenderer::OnSetFormat(
    const media::AudioParameters& params) {
  DVLOG(1) << "WebRtcLocalAudioRenderer::OnSetFormat()";
  // If the source is restarted, we might have changed to another capture
  // thread.
  capture_thread_checker_.DetachFromThread();
  DCHECK(capture_thread_checker_.CalledOnValidThread());

  // Post a task on the main render thread to reconfigure the |sink_| with the
  // new format.
  message_loop_->PostTask(
      FROM_HERE,
      base::Bind(&WebRtcLocalAudioRenderer::ReconfigureSink, this,
                 params));
}

// WebRtcLocalAudioRenderer::WebRtcLocalAudioRenderer implementation.
WebRtcLocalAudioRenderer::WebRtcLocalAudioRenderer(
    const blink::WebMediaStreamTrack& audio_track,
    int source_render_view_id,
    int source_render_frame_id,
    int session_id,
    int frames_per_buffer)
    : audio_track_(audio_track),
      source_render_view_id_(source_render_view_id),
      source_render_frame_id_(source_render_frame_id),
      session_id_(session_id),
      message_loop_(base::MessageLoopProxy::current()),
      playing_(false),
      frames_per_buffer_(frames_per_buffer),
      volume_(0.0),
      sink_started_(false) {
  DVLOG(1) << "WebRtcLocalAudioRenderer::WebRtcLocalAudioRenderer()";
}

WebRtcLocalAudioRenderer::~WebRtcLocalAudioRenderer() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  DCHECK(!sink_.get());
  DVLOG(1) << "WebRtcLocalAudioRenderer::~WebRtcLocalAudioRenderer()";
}

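// The public lifecycle below runs on the main render thread: Start() attaches
// to the audio track and creates |sink_|, Play()/Pause()/SetVolume() control
// rendering, and Stop() must be called before destruction (the destructor
// DCHECKs that |sink_| has been released).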
void WebRtcLocalAudioRenderer::Start() {
  DVLOG(1) << "WebRtcLocalAudioRenderer::Start()";
  DCHECK(message_loop_->BelongsToCurrentThread());

  // We get audio data from |audio_track_|...
  MediaStreamAudioSink::AddToAudioTrack(this, audio_track_);
  // ...and |sink_| will get audio data from us.
  DCHECK(!sink_.get());
  sink_ = AudioDeviceFactory::NewOutputDevice(source_render_view_id_,
                                              source_render_frame_id_);
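  // Note that the sink is only created here, not started; MaybeStartSink()
  // starts it later, once Play() has been called with a non-zero volume and
  // valid audio parameters are known.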

  base::AutoLock auto_lock(thread_lock_);
  last_render_time_ = base::TimeTicks::Now();
  playing_ = false;
}

void WebRtcLocalAudioRenderer::Stop() {
  DVLOG(1) << "WebRtcLocalAudioRenderer::Stop()";
  DCHECK(message_loop_->BelongsToCurrentThread());

  {
    base::AutoLock auto_lock(thread_lock_);
    playing_ = false;
    loopback_fifo_.reset();
  }

  // Stop the output audio stream, i.e., stop asking for data to render.
  // It is safer to call Stop() on |sink_| to clean up its resources even if
  // the sink was never started.
  if (sink_.get()) {
    sink_->Stop();
    sink_ = NULL;
  }

  if (!sink_started_) {
    UMA_HISTOGRAM_ENUMERATION("Media.LocalRendererSinkStates",
                              kSinkNeverStarted, kSinkStatesMax);
  }
  sink_started_ = false;

  // Ensure that the capturer stops feeding us with captured audio.
  MediaStreamAudioSink::RemoveFromAudioTrack(this, audio_track_);
}

void WebRtcLocalAudioRenderer::Play() {
  DVLOG(1) << "WebRtcLocalAudioRenderer::Play()";
  DCHECK(message_loop_->BelongsToCurrentThread());

  if (!sink_.get())
    return;

  {
    base::AutoLock auto_lock(thread_lock_);
    // Resumes rendering by ensuring that WebRtcLocalAudioRenderer::Render()
    // now reads data from the local FIFO.
    playing_ = true;
    last_render_time_ = base::TimeTicks::Now();
  }

  // Note: If volume_ is currently muted, the |sink_| will not be started yet.
  MaybeStartSink();
}

void WebRtcLocalAudioRenderer::Pause() {
  DVLOG(1) << "WebRtcLocalAudioRenderer::Pause()";
  DCHECK(message_loop_->BelongsToCurrentThread());

  if (!sink_.get())
    return;

  base::AutoLock auto_lock(thread_lock_);
  // Temporarily suspends rendering audio.
  // WebRtcLocalAudioRenderer::Render() will return early during this state
  // and only zeros will be provided to the active sink.
  playing_ = false;
}

void WebRtcLocalAudioRenderer::SetVolume(float volume) {
  DVLOG(1) << "WebRtcLocalAudioRenderer::SetVolume(" << volume << ")";
  DCHECK(message_loop_->BelongsToCurrentThread());

  {
    base::AutoLock auto_lock(thread_lock_);
    // Cache the volume.
    volume_ = volume;
  }

  // Lazily start the |sink_| when the local renderer is unmuted while
  // playing.
  MaybeStartSink();

  if (sink_.get())
    sink_->SetVolume(volume);
}

base::TimeDelta WebRtcLocalAudioRenderer::GetCurrentRenderTime() const {
  DCHECK(message_loop_->BelongsToCurrentThread());
  base::AutoLock auto_lock(thread_lock_);
  if (!sink_.get())
    return base::TimeDelta();
  return total_render_time();
}

bool WebRtcLocalAudioRenderer::IsLocalRenderer() const {
  return true;
}

void WebRtcLocalAudioRenderer::MaybeStartSink() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  DVLOG(1) << "WebRtcLocalAudioRenderer::MaybeStartSink()";

  if (!sink_.get() || !source_params_.IsValid())
    return;

  {
    // Clear up the old data in the FIFO.
    base::AutoLock auto_lock(thread_lock_);
    loopback_fifo_->Clear();
  }

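  // Start the sink only when valid sink parameters are available, playback
  // has been requested, the volume is non-zero, and the sink has not already
  // been started.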
  if (!sink_params_.IsValid() || !playing_ || !volume_ || sink_started_)
    return;

  DVLOG(1) << "WebRtcLocalAudioRenderer::MaybeStartSink() -- Starting sink_.";
  sink_->InitializeWithSessionId(sink_params_, this, session_id_);
  sink_->Start();
  sink_started_ = true;
  UMA_HISTOGRAM_ENUMERATION("Media.LocalRendererSinkStates",
                            kSinkStarted, kSinkStatesMax);
}

void WebRtcLocalAudioRenderer::ReconfigureSink(
    const media::AudioParameters& params) {
  DCHECK(message_loop_->BelongsToCurrentThread());

  DVLOG(1) << "WebRtcLocalAudioRenderer::ReconfigureSink()";

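  // Check whether audio ducking is active for the owning frame so that the
  // DUCKING effect can be mirrored onto the sink parameters created below.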
  int implicit_ducking_effect = 0;
  RenderFrameImpl* const frame =
      RenderFrameImpl::FromRoutingID(source_render_frame_id_);
  MediaStreamDispatcher* const dispatcher = frame ?
      frame->GetMediaStreamDispatcher() : NULL;
  if (dispatcher && dispatcher->IsAudioDuckingActive()) {
    DVLOG(1) << "Forcing DUCKING to be ON for output";
    implicit_ducking_effect = media::AudioParameters::DUCKING;
  } else {
    DVLOG(1) << "DUCKING not forced ON for output";
  }

  if (source_params_.Equals(params))
    return;

  // Reset the |source_params_|, |sink_params_| and |loopback_fifo_| to match
  // the new format.

  source_params_ = params;

  sink_params_ = media::AudioParameters(source_params_.format(),
      source_params_.channel_layout(), source_params_.sample_rate(),
      source_params_.bits_per_sample(),
#if defined(OS_ANDROID)
      // On Android, input and output use the same sample rate. In order to
      // use the low latency mode, we need to use the buffer size suggested by
      // the AudioManager for the sink. It will later be used to decide
      // the buffer size of the shared memory buffer.
      frames_per_buffer_,
#else
      2 * source_params_.frames_per_buffer(),
#endif
      // If DUCKING is enabled on the source, it needs to be enabled on the
      // sink as well.
      source_params_.effects() | implicit_ducking_effect);

  {
    // TODO(henrika): we could add a more dynamic solution here but I prefer
    // a fixed size combined with bad audio at overflow. The alternative is
    // that we start to build up latency and that can be more difficult to
    // detect. Tests have shown that the FIFO never contains more than 2 or 3
    // audio frames but I have selected a max size of ten buffers just
    // in case since these tests were performed on a 16 core, 64GB Win 7
    // machine. We could also add some sort of error notifier in this area if
    // the FIFO overflows.
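    // For example, on non-Android platforms the sink buffer is twice the
    // source buffer (see above), so this works out to
    // 10 * F / (2 * F) + 1 = 6 sink-sized blocks.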
    const int blocks_of_buffers =
        10 * params.frames_per_buffer() / sink_params_.frames_per_buffer() + 1;
    media::AudioBlockFifo* new_fifo = new media::AudioBlockFifo(
        params.channels(), sink_params_.frames_per_buffer(), blocks_of_buffers);

    base::AutoLock auto_lock(thread_lock_);
    loopback_fifo_.reset(new_fifo);
  }

  if (!sink_.get())
    return;  // WebRtcLocalAudioRenderer has not yet been started.

  // Stop |sink_| and create a new one that is initialized with the new audio
  // parameters. Then invoke MaybeStartSink() to restart rendering.
  if (sink_started_) {
    sink_->Stop();
    sink_started_ = false;
  }

  sink_ = AudioDeviceFactory::NewOutputDevice(source_render_view_id_,
                                              source_render_frame_id_);
  MaybeStartSink();
}

}  // namespace content