// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/browser/media/capture/desktop_capture_device.h"

#include "base/bind.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/metrics/field_trial.h"
#include "base/metrics/histogram.h"
#include "base/sequenced_task_runner.h"
#include "base/strings/string_number_conversions.h"
#include "base/synchronization/lock.h"
#include "base/threading/sequenced_worker_pool.h"
#include "base/threading/thread.h"
#include "content/browser/media/capture/desktop_capture_device_uma_types.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/desktop_media_id.h"
#include "media/base/video_util.h"
#include "third_party/libyuv/include/libyuv/scale_argb.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_capturer.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h"
#include "third_party/webrtc/modules/desktop_capture/screen_capturer.h"
#include "third_party/webrtc/modules/desktop_capture/window_capturer.h"

namespace content {

namespace {

// Maximum CPU time percentage of a single core that can be consumed for
// desktop capturing. This means that on systems where screen scraping is slow
// we may need to capture at a frame rate lower than requested. This is
// necessary to keep the UI responsive.
const int kMaximumCpuConsumptionPercentage = 50;

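// Returns the largest sub-rectangle, centered within |max_size|, that has the
// same aspect ratio as |source_size|. Captured frames are scaled into this
// region and the surrounding area is filled with black (letterboxing).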
webrtc::DesktopRect ComputeLetterboxRect(
    const webrtc::DesktopSize& max_size,
    const webrtc::DesktopSize& source_size) {
  gfx::Rect result = media::ComputeLetterboxRegion(
      gfx::Rect(0, 0, max_size.width(), max_size.height()),
      gfx::Size(source_size.width(), source_size.height()));
  return webrtc::DesktopRect::MakeLTRB(
      result.x(), result.y(), result.right(), result.bottom());
}

}  // namespace

class DesktopCaptureDevice::Core
    : public base::RefCountedThreadSafe<Core>,
      public webrtc::DesktopCapturer::Callback {
 public:
  Core(scoped_refptr<base::SequencedTaskRunner> task_runner,
       scoped_ptr<base::Thread> thread,
       scoped_ptr<webrtc::DesktopCapturer> capturer,
       DesktopMediaID::Type type);

  // Implementation of VideoCaptureDevice methods.
  void AllocateAndStart(const media::VideoCaptureParams& params,
                        scoped_ptr<Client> client);
  void StopAndDeAllocate();

  void SetNotificationWindowId(gfx::NativeViewId window_id);

 private:
  friend class base::RefCountedThreadSafe<Core>;
  virtual ~Core();

  // webrtc::DesktopCapturer::Callback interface
  virtual webrtc::SharedMemory* CreateSharedMemory(size_t size) OVERRIDE;
  virtual void OnCaptureCompleted(webrtc::DesktopFrame* frame) OVERRIDE;

  // Helper methods that run on the |task_runner_|. Posted from the
  // corresponding public methods.
  void DoAllocateAndStart(const media::VideoCaptureParams& params,
                          scoped_ptr<Client> client);
  void DoStopAndDeAllocate();

  // Chooses new output properties based on the supplied source size and the
  // properties requested in AllocateAndStart(), and updates |capture_format_|
  // and |output_rect_| accordingly.
  void RefreshCaptureFormat(const webrtc::DesktopSize& frame_size);

  // Method scheduled on |task_runner_| to be called at regular intervals to
  // capture a frame.
  void OnCaptureTimer();

  // Captures a frame and schedules timer for the next one.
  void CaptureFrameAndScheduleNext();

  // Captures a single frame.
  void DoCapture();

  void DoSetNotificationWindowId(gfx::NativeViewId window_id);

  // Task runner used for capturing operations.
  scoped_refptr<base::SequencedTaskRunner> task_runner_;

  // The thread on which the capturer is running.
  scoped_ptr<base::Thread> thread_;

  // The underlying DesktopCapturer instance used to capture frames.
  scoped_ptr<webrtc::DesktopCapturer> desktop_capturer_;

  // The device client which proxies device events to the controller. Accessed
  // on the |task_runner_| thread.
  scoped_ptr<Client> client_;

  // Requested video capture format (width, height, frame rate, etc).
  media::VideoCaptureParams requested_params_;

  // Actual video capture format being generated.
  media::VideoCaptureFormat capture_format_;

  // Size of frame most recently captured from the source.
  webrtc::DesktopSize previous_frame_size_;

  // DesktopFrame into which captured frames are down-scaled and/or
  // letterboxed, depending upon the caller's requested capture capabilities.
  // If frames can be returned to the caller directly then this is NULL.
  scoped_ptr<webrtc::DesktopFrame> output_frame_;

  // Sub-rectangle of |output_frame_| into which the source will be scaled
  // and/or letterboxed.
  webrtc::DesktopRect output_rect_;

  // True when a delayed OnCaptureTimer() task has been posted on
  // |task_runner_|.
  bool capture_task_posted_;

  // True when waiting for |desktop_capturer_| to capture the current frame.
  bool capture_in_progress_;

  // True if the first capture call has returned. Used to log the first capture
  // result.
  bool first_capture_returned_;

  // The type of the capturer.
  DesktopMediaID::Type capturer_type_;

  scoped_ptr<webrtc::BasicDesktopFrame> black_frame_;

  DISALLOW_COPY_AND_ASSIGN(Core);
};

DesktopCaptureDevice::Core::Core(
    scoped_refptr<base::SequencedTaskRunner> task_runner,
    scoped_ptr<base::Thread> thread,
    scoped_ptr<webrtc::DesktopCapturer> capturer,
    DesktopMediaID::Type type)
    : task_runner_(task_runner),
      thread_(thread.Pass()),
      desktop_capturer_(capturer.Pass()),
      capture_task_posted_(false),
      capture_in_progress_(false),
      first_capture_returned_(false),
      capturer_type_(type) {
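  // At most one of |task_runner| and |thread| may be non-NULL; when a
  // dedicated thread is supplied, its message loop becomes the capture task
  // runner.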
  DCHECK(!task_runner_.get() || !thread_.get());
  if (thread_.get())
    task_runner_ = thread_->message_loop_proxy();
}

DesktopCaptureDevice::Core::~Core() {
}

void DesktopCaptureDevice::Core::AllocateAndStart(
    const media::VideoCaptureParams& params,
    scoped_ptr<Client> client) {
  DCHECK_GT(params.requested_format.frame_size.GetArea(), 0);
  DCHECK_GT(params.requested_format.frame_rate, 0);

  task_runner_->PostTask(
      FROM_HERE,
      base::Bind(
          &Core::DoAllocateAndStart, this, params, base::Passed(&client)));
}

void DesktopCaptureDevice::Core::StopAndDeAllocate() {
  task_runner_->PostTask(FROM_HERE,
                         base::Bind(&Core::DoStopAndDeAllocate, this));
}

void DesktopCaptureDevice::Core::SetNotificationWindowId(
    gfx::NativeViewId window_id) {
  task_runner_->PostTask(
      FROM_HERE, base::Bind(&Core::DoSetNotificationWindowId, this, window_id));
}

webrtc::SharedMemory*
DesktopCaptureDevice::Core::CreateSharedMemory(size_t size) {
  return NULL;
}

void DesktopCaptureDevice::Core::OnCaptureCompleted(
    webrtc::DesktopFrame* frame) {
  DCHECK(task_runner_->RunsTasksOnCurrentThread());
  DCHECK(capture_in_progress_);

  if (!first_capture_returned_) {
    first_capture_returned_ = true;
    if (capturer_type_ == DesktopMediaID::TYPE_SCREEN) {
      IncrementDesktopCaptureCounter(frame ? FIRST_SCREEN_CAPTURE_SUCCEEDED
                                           : FIRST_SCREEN_CAPTURE_FAILED);
    } else {
      IncrementDesktopCaptureCounter(frame ? FIRST_WINDOW_CAPTURE_SUCCEEDED
                                           : FIRST_WINDOW_CAPTURE_FAILED);
    }
  }

  capture_in_progress_ = false;

  if (!frame) {
    std::string log("Failed to capture a frame.");
    LOG(ERROR) << log;
    client_->OnError(log);
    return;
  }

  if (!client_)
    return;

  base::TimeDelta capture_time(
      base::TimeDelta::FromMilliseconds(frame->capture_time_ms()));
  UMA_HISTOGRAM_TIMES(
      capturer_type_ == DesktopMediaID::TYPE_SCREEN ? kUmaScreenCaptureTime
                                                    : kUmaWindowCaptureTime,
      capture_time);

  scoped_ptr<webrtc::DesktopFrame> owned_frame(frame);

  // On OS X we receive a 1x1 frame when the shared window is minimized. It
  // cannot be subsampled to I420 and will be dropped downstream. So we replace
  // it with a black frame to avoid the video appearing frozen at the last
  // frame.
  if (frame->size().width() == 1 || frame->size().height() == 1) {
    if (!black_frame_.get()) {
      black_frame_.reset(
          new webrtc::BasicDesktopFrame(
              webrtc::DesktopSize(capture_format_.frame_size.width(),
                                  capture_format_.frame_size.height())));
      memset(black_frame_->data(),
             0,
             black_frame_->stride() * black_frame_->size().height());
    }
    owned_frame.reset();
    frame = black_frame_.get();
  }

  // Handle initial frame size and size changes.
  RefreshCaptureFormat(frame->size());

  webrtc::DesktopSize output_size(capture_format_.frame_size.width(),
                                  capture_format_.frame_size.height());
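  // The output buffer holds |output_size| ARGB pixels at
  // webrtc::DesktopFrame::kBytesPerPixel (4) bytes each.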
  size_t output_bytes = output_size.width() * output_size.height() *
      webrtc::DesktopFrame::kBytesPerPixel;
  const uint8_t* output_data = NULL;
  scoped_ptr<uint8_t[]> flipped_frame_buffer;

  if (frame->size().equals(output_size)) {
    // If the captured frame matches the output size, we can return the pixel
    // data directly, without scaling.
    output_data = frame->data();

    // If the |frame| generated by the screen capturer is inverted then we need
    // to flip |frame|.
    // This happens only on a specific platform. Refer to crbug.com/306876.
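    // A negative stride indicates that the rows are stored bottom-up in
    // memory, so stepping |output_data| by |frame->stride()| visits rows in
    // top-to-bottom display order; the loop below copies them into a
    // contiguous top-down buffer.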
    if (frame->stride() < 0) {
      int height = frame->size().height();
      int bytes_per_row =
          frame->size().width() * webrtc::DesktopFrame::kBytesPerPixel;
      flipped_frame_buffer.reset(new uint8_t[output_bytes]);
      uint8_t* dest = flipped_frame_buffer.get();
      for (int row = 0; row < height; ++row) {
        memcpy(dest, output_data, bytes_per_row);
        dest += bytes_per_row;
        output_data += frame->stride();
      }
      output_data = flipped_frame_buffer.get();
    }
  } else {
    // Otherwise we need to down-scale and/or letterbox to the target format.

    // Allocate a buffer of the correct size to scale the frame into.
    // |output_frame_| is cleared whenever |output_rect_| changes, so we don't
    // need to worry about clearing out stale pixel data in letterboxed areas.
    if (!output_frame_) {
      output_frame_.reset(new webrtc::BasicDesktopFrame(output_size));
      memset(output_frame_->data(), 0, output_bytes);
    }
    DCHECK(output_frame_->size().equals(output_size));

    // TODO(wez): Optimize this to scale only changed portions of the output,
    // using ARGBScaleClip().
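    // |output_rect_data| points at the top-left pixel of the letterbox region
    // inside |output_frame_|, into which the source frame is scaled.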
    uint8_t* output_rect_data = output_frame_->data() +
        output_frame_->stride() * output_rect_.top() +
        webrtc::DesktopFrame::kBytesPerPixel * output_rect_.left();
    libyuv::ARGBScale(frame->data(), frame->stride(),
                      frame->size().width(), frame->size().height(),
                      output_rect_data, output_frame_->stride(),
                      output_rect_.width(), output_rect_.height(),
                      libyuv::kFilterBilinear);
    output_data = output_frame_->data();
  }

  client_->OnIncomingCapturedData(
      output_data, output_bytes, capture_format_, 0, base::TimeTicks::Now());
}

void DesktopCaptureDevice::Core::DoAllocateAndStart(
    const media::VideoCaptureParams& params,
    scoped_ptr<Client> client) {
  DCHECK(task_runner_->RunsTasksOnCurrentThread());
  DCHECK(desktop_capturer_);
  DCHECK(client.get());
  DCHECK(!client_.get());

  client_ = client.Pass();
  requested_params_ = params;

  capture_format_ = requested_params_.requested_format;

  // This capturer always outputs ARGB, non-interlaced.
  capture_format_.pixel_format = media::PIXEL_FORMAT_ARGB;

  desktop_capturer_->Start(this);

  CaptureFrameAndScheduleNext();
}

void DesktopCaptureDevice::Core::DoStopAndDeAllocate() {
  DCHECK(task_runner_->RunsTasksOnCurrentThread());
  client_.reset();
  output_frame_.reset();
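  // Reset the cached source size so that a subsequent AllocateAndStart() will
  // recompute the output geometry in RefreshCaptureFormat().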
  previous_frame_size_.set(0, 0);
  desktop_capturer_.reset();
}

void DesktopCaptureDevice::Core::RefreshCaptureFormat(
    const webrtc::DesktopSize& frame_size) {
  if (previous_frame_size_.equals(frame_size))
    return;

  // Clear the output frame, if any, since it will either need resizing, or
  // clearing of stale data in letterbox areas, anyway.
  output_frame_.reset();

  if (previous_frame_size_.is_empty() ||
      requested_params_.allow_resolution_change) {
    // If this is the first frame, or the receiver supports variable
    // resolution, then determine the output size by treating the requested
    // width & height as maxima.
    if (frame_size.width() >
            requested_params_.requested_format.frame_size.width() ||
        frame_size.height() >
            requested_params_.requested_format.frame_size.height()) {
      output_rect_ = ComputeLetterboxRect(
          webrtc::DesktopSize(
              requested_params_.requested_format.frame_size.width(),
              requested_params_.requested_format.frame_size.height()),
          frame_size);
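      // The output frame will be sized to match |output_rect_| exactly, so
      // move the rectangle to the origin; the result is a plain
      // aspect-preserving downscale with no letterbox bars.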
      output_rect_.Translate(-output_rect_.left(), -output_rect_.top());
    } else {
      output_rect_ = webrtc::DesktopRect::MakeSize(frame_size);
    }
    capture_format_.frame_size.SetSize(output_rect_.width(),
                                       output_rect_.height());
  } else {
    // Otherwise the output frame size cannot change, so just scale and
    // letterbox.
    output_rect_ = ComputeLetterboxRect(
        webrtc::DesktopSize(capture_format_.frame_size.width(),
                            capture_format_.frame_size.height()),
        frame_size);
  }

  previous_frame_size_ = frame_size;
}

void DesktopCaptureDevice::Core::OnCaptureTimer() {
  DCHECK(capture_task_posted_);
  capture_task_posted_ = false;

  if (!client_)
    return;

  CaptureFrameAndScheduleNext();
}

void DesktopCaptureDevice::Core::CaptureFrameAndScheduleNext() {
  DCHECK(task_runner_->RunsTasksOnCurrentThread());
  DCHECK(!capture_task_posted_);

  base::TimeTicks started_time = base::TimeTicks::Now();
  DoCapture();
  base::TimeDelta last_capture_duration = base::TimeTicks::Now() - started_time;

  // Limit frame-rate to reduce CPU consumption.
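  // For example, with the default 50% cap a capture that took 40 ms cannot be
  // followed by another capture start for at least 80 ms, even if the
  // requested frame rate would imply a shorter period.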
  base::TimeDelta capture_period = std::max(
      (last_capture_duration * 100) / kMaximumCpuConsumptionPercentage,
      base::TimeDelta::FromSeconds(1) / capture_format_.frame_rate);

  // Schedule a task for the next frame.
  capture_task_posted_ = true;
  task_runner_->PostDelayedTask(
      FROM_HERE, base::Bind(&Core::OnCaptureTimer, this),
      capture_period - last_capture_duration);
}

void DesktopCaptureDevice::Core::DoCapture() {
  DCHECK(task_runner_->RunsTasksOnCurrentThread());
  DCHECK(!capture_in_progress_);

  capture_in_progress_ = true;
  desktop_capturer_->Capture(webrtc::DesktopRegion());

  // Currently only synchronous implementations of DesktopCapturer are
  // supported.
  DCHECK(!capture_in_progress_);
}

void DesktopCaptureDevice::Core::DoSetNotificationWindowId(
    gfx::NativeViewId window_id) {
  DCHECK(task_runner_->RunsTasksOnCurrentThread());
  DCHECK(window_id);
  desktop_capturer_->SetExcludedWindow(window_id);
}

// static
scoped_ptr<media::VideoCaptureDevice> DesktopCaptureDevice::Create(
    const DesktopMediaID& source) {
  scoped_ptr<base::Thread> ui_thread;

  webrtc::DesktopCaptureOptions options =
      webrtc::DesktopCaptureOptions::CreateDefault();
  // Leave desktop effects enabled during WebRTC captures.
  options.set_disable_effects(false);

  scoped_ptr<webrtc::DesktopCapturer> capturer;

  switch (source.type) {
    case DesktopMediaID::TYPE_SCREEN: {
      scoped_ptr<webrtc::ScreenCapturer> screen_capturer;

#if defined(OS_WIN)
      bool magnification_allowed =
          base::FieldTrialList::FindFullName("ScreenCaptureUseMagnification") ==
          "Enabled";

      if (magnification_allowed) {
        // The magnification capturer requires running on a dedicated UI thread.
        ui_thread.reset(new base::Thread("screenCaptureUIThread"));
        base::Thread::Options thread_options(base::MessageLoop::TYPE_UI, 0);
        ui_thread->StartWithOptions(thread_options);

        options.set_allow_use_magnification_api(true);
      }
#endif

      screen_capturer.reset(webrtc::ScreenCapturer::Create(options));
      if (screen_capturer && screen_capturer->SelectScreen(source.id)) {
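        // Wrap the screen capturer so that the mouse cursor is composited into
        // each captured frame.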
        capturer.reset(new webrtc::DesktopAndCursorComposer(
            screen_capturer.release(),
            webrtc::MouseCursorMonitor::CreateForScreen(options, source.id)));
        IncrementDesktopCaptureCounter(SCREEN_CAPTURER_CREATED);
      }
      break;
    }

    case DesktopMediaID::TYPE_WINDOW: {
      scoped_ptr<webrtc::WindowCapturer> window_capturer(
          webrtc::WindowCapturer::Create(options));
      if (window_capturer && window_capturer->SelectWindow(source.id)) {
        window_capturer->BringSelectedWindowToFront();
        capturer.reset(new webrtc::DesktopAndCursorComposer(
            window_capturer.release(),
            webrtc::MouseCursorMonitor::CreateForWindow(options, source.id)));
        IncrementDesktopCaptureCounter(WINDOW_CATPTURER_CREATED);
      }
      break;
    }

    default: {
      NOTREACHED();
    }
  }

  scoped_ptr<media::VideoCaptureDevice> result;
  if (capturer) {
    scoped_refptr<base::SequencedTaskRunner> task_runner;
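    // The capturer runs either on the dedicated UI thread created above (the
    // Windows magnification path) or on a sequence from the browser's blocking
    // pool, since screen scraping may block for long periods.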
    if (!ui_thread.get()) {
      scoped_refptr<base::SequencedWorkerPool> blocking_pool =
          BrowserThread::GetBlockingPool();
      task_runner = blocking_pool->GetSequencedTaskRunner(
          blocking_pool->GetSequenceToken());
    }
    result.reset(new DesktopCaptureDevice(
        task_runner, ui_thread.Pass(), capturer.Pass(), source.type));
  }

  return result.Pass();
}

DesktopCaptureDevice::~DesktopCaptureDevice() {
  StopAndDeAllocate();
}

void DesktopCaptureDevice::AllocateAndStart(
    const media::VideoCaptureParams& params,
    scoped_ptr<Client> client) {
  core_->AllocateAndStart(params, client.Pass());
}

void DesktopCaptureDevice::StopAndDeAllocate() {
  core_->StopAndDeAllocate();
}

void DesktopCaptureDevice::SetNotificationWindowId(
    gfx::NativeViewId window_id) {
  core_->SetNotificationWindowId(window_id);
}

DesktopCaptureDevice::DesktopCaptureDevice(
    scoped_refptr<base::SequencedTaskRunner> task_runner,
    scoped_ptr<base::Thread> thread,
    scoped_ptr<webrtc::DesktopCapturer> capturer,
    DesktopMediaID::Type type)
    : core_(new Core(task_runner, thread.Pass(), capturer.Pass(), type)) {
}

}  // namespace content