// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "remoting/host/video_scheduler.h"

#include <algorithm>

#include "base/bind.h"
#include "base/callback.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/stl_util.h"
#include "base/sys_info.h"
#include "base/time/time.h"
#include "remoting/proto/control.pb.h"
#include "remoting/proto/internal.pb.h"
#include "remoting/proto/video.pb.h"
#include "remoting/protocol/cursor_shape_stub.h"
#include "remoting/protocol/message_decoder.h"
#include "remoting/protocol/video_stub.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_capturer.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/mouse_cursor.h"
#include "third_party/webrtc/modules/desktop_capture/mouse_cursor_shape.h"

namespace remoting {

// Maximum number of frames that can be processed simultaneously.
// TODO(hclam): Move this value to CaptureScheduler.
static const int kMaxPendingFrames = 2;

// Interval between empty keep-alive frames. These frames are sent only when
// the stream is paused or inactive for some other reason (e.g. when blocked
// on the capturer). To prevent PseudoTCP from resetting its congestion
// window, this value must be smaller than the minimum RTO used in PseudoTCP,
// which is 250ms.
static const int kKeepAlivePacketIntervalMs = 200;

static bool g_enable_timestamps = false;

// static
void VideoScheduler::EnableTimestampsForTests() {
  g_enable_timestamps = true;
}

VideoScheduler::VideoScheduler(
    scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner,
    scoped_refptr<base::SingleThreadTaskRunner> encode_task_runner,
    scoped_refptr<base::SingleThreadTaskRunner> network_task_runner,
    scoped_ptr<webrtc::DesktopCapturer> capturer,
    scoped_ptr<webrtc::MouseCursorMonitor> mouse_cursor_monitor,
    scoped_ptr<VideoEncoder> encoder,
    protocol::CursorShapeStub* cursor_stub,
    protocol::VideoStub* video_stub)
    : capture_task_runner_(capture_task_runner),
      encode_task_runner_(encode_task_runner),
      network_task_runner_(network_task_runner),
      capturer_(capturer.Pass()),
      mouse_cursor_monitor_(mouse_cursor_monitor.Pass()),
      encoder_(encoder.Pass()),
      cursor_stub_(cursor_stub),
      video_stub_(video_stub),
      pending_frames_(0),
      capture_pending_(false),
      did_skip_frame_(false),
      is_paused_(false),
      sequence_number_(0) {
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  DCHECK(capturer_);
  DCHECK(mouse_cursor_monitor_);
  DCHECK(encoder_);
  DCHECK(cursor_stub_);
  DCHECK(video_stub_);
}

// Public methods --------------------------------------------------------------

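// Capturer callback. Returning NULL tells the capturer that this host does
// not provide shared-memory buffers, so captured frames are allocated on the
// heap instead.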
webrtc::SharedMemory* VideoScheduler::CreateSharedMemory(size_t size) {
  return NULL;
}

void VideoScheduler::OnCaptureCompleted(webrtc::DesktopFrame* frame) {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  capture_pending_ = false;

  scoped_ptr<webrtc::DesktopFrame> owned_frame(frame);

  if (owned_frame) {
    scheduler_.RecordCaptureTime(
        base::TimeDelta::FromMilliseconds(owned_frame->capture_time_ms()));
  }

  // Even when |frame| is NULL we still need to post it to the encode thread
  // to make sure frames are freed in the same order they are received and
  // that we don't start capturing frame n+2 before frame n is freed.
  encode_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::EncodeFrame, this,
                            base::Passed(&owned_frame), sequence_number_,
                            base::TimeTicks::Now()));

  // If a frame was skipped, try to capture it again.
  if (did_skip_frame_) {
    capture_task_runner_->PostTask(
        FROM_HERE, base::Bind(&VideoScheduler::CaptureNextFrame, this));
  }
}

void VideoScheduler::OnMouseCursor(webrtc::MouseCursor* cursor) {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  scoped_ptr<webrtc::MouseCursor> owned_cursor(cursor);

  // Do nothing if the scheduler is being stopped.
  if (!capturer_)
    return;

  scoped_ptr<protocol::CursorShapeInfo> cursor_proto(
      new protocol::CursorShapeInfo());
  cursor_proto->set_width(cursor->image()->size().width());
  cursor_proto->set_height(cursor->image()->size().height());
  cursor_proto->set_hotspot_x(cursor->hotspot().x());
  cursor_proto->set_hotspot_y(cursor->hotspot().y());

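  // Copy the cursor image into the proto one row at a time, since the source
  // image stride may be larger than width * kBytesPerPixel.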
  uint8_t* current_row = cursor->image()->data();
  for (int y = 0; y < cursor->image()->size().height(); ++y) {
    cursor_proto->mutable_data()->append(
        current_row,
        current_row + cursor->image()->size().width() *
            webrtc::DesktopFrame::kBytesPerPixel);
    current_row += cursor->image()->stride();
  }

  network_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::SendCursorShape, this,
                            base::Passed(&cursor_proto)));
}

void VideoScheduler::OnMouseCursorPosition(
    webrtc::MouseCursorMonitor::CursorState state,
    const webrtc::DesktopVector& position) {
  // We're not subscribing to mouse position changes.
  NOTREACHED();
}

void VideoScheduler::Start() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  capture_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::StartOnCaptureThread, this));
}

void VideoScheduler::Stop() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  // Clear stubs to prevent further updates reaching the client.
  cursor_stub_ = NULL;
  video_stub_ = NULL;

  keep_alive_timer_.reset();

  capture_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::StopOnCaptureThread, this));
}

void VideoScheduler::Pause(bool pause) {
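  // Pause() is initially called on the network thread; trampoline the call to
  // the capture thread, where |is_paused_| and |capture_timer_| are used.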
  if (!capture_task_runner_->BelongsToCurrentThread()) {
    DCHECK(network_task_runner_->BelongsToCurrentThread());
    capture_task_runner_->PostTask(
        FROM_HERE, base::Bind(&VideoScheduler::Pause, this, pause));
    return;
  }

  if (is_paused_ != pause) {
    is_paused_ = pause;

    // Restart captures if we're resuming and there are none scheduled.
    if (!is_paused_ && capture_timer_ && !capture_timer_->IsRunning())
      CaptureNextFrame();
  }
}

void VideoScheduler::UpdateSequenceNumber(int64 sequence_number) {
  if (!capture_task_runner_->BelongsToCurrentThread()) {
    DCHECK(network_task_runner_->BelongsToCurrentThread());
    capture_task_runner_->PostTask(
        FROM_HERE, base::Bind(&VideoScheduler::UpdateSequenceNumber,
                              this, sequence_number));
    return;
  }

  sequence_number_ = sequence_number;
}

void VideoScheduler::SetLosslessEncode(bool want_lossless) {
  if (!encode_task_runner_->BelongsToCurrentThread()) {
    DCHECK(network_task_runner_->BelongsToCurrentThread());
    encode_task_runner_->PostTask(
        FROM_HERE, base::Bind(&VideoScheduler::SetLosslessEncode,
                              this, want_lossless));
    return;
  }

  encoder_->SetLosslessEncode(want_lossless);
}

void VideoScheduler::SetLosslessColor(bool want_lossless) {
  if (!encode_task_runner_->BelongsToCurrentThread()) {
    DCHECK(network_task_runner_->BelongsToCurrentThread());
    encode_task_runner_->PostTask(
        FROM_HERE, base::Bind(&VideoScheduler::SetLosslessColor,
                              this, want_lossless));
    return;
  }

  encoder_->SetLosslessColor(want_lossless);
}

// Private methods -----------------------------------------------------------

VideoScheduler::~VideoScheduler() {
  // Destroy the capturer and encoder on their respective threads.
  capture_task_runner_->DeleteSoon(FROM_HERE, capturer_.release());
  capture_task_runner_->DeleteSoon(FROM_HERE, mouse_cursor_monitor_.release());
  encode_task_runner_->DeleteSoon(FROM_HERE, encoder_.release());
}

// Capturer thread -------------------------------------------------------------

void VideoScheduler::StartOnCaptureThread() {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());
  DCHECK(!capture_timer_);

  // Start the mouse cursor monitor.
  mouse_cursor_monitor_->Init(this, webrtc::MouseCursorMonitor::SHAPE_ONLY);

  // Start the capturer.
  capturer_->Start(this);

  capture_timer_.reset(new base::OneShotTimer<VideoScheduler>());
  keep_alive_timer_.reset(new base::DelayTimer<VideoScheduler>(
      FROM_HERE, base::TimeDelta::FromMilliseconds(kKeepAlivePacketIntervalMs),
      this, &VideoScheduler::SendKeepAlivePacket));

  // Capture the first frame immediately.
  CaptureNextFrame();
}

void VideoScheduler::StopOnCaptureThread() {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  // This doesn't delete frames that have already been captured, so the
  // encoder can keep using them.
  capturer_.reset();

  // |capture_timer_| must be destroyed on the thread on which it is used.
  capture_timer_.reset();
}

void VideoScheduler::ScheduleNextCapture() {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

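  // |capture_timer_| is a one-shot timer, so this arms exactly one capture
  // after the delay computed by the capture scheduler.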
  capture_timer_->Start(FROM_HERE,
                        scheduler_.NextCaptureDelay(),
                        this,
                        &VideoScheduler::CaptureNextFrame);
}

void VideoScheduler::CaptureNextFrame() {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  // If we are stopping (|capturer_| is NULL), or paused, then don't capture.
  if (!capturer_ || is_paused_)
    return;

  // Make sure we have at most two outstanding recordings. We can simply
  // return if we can't make a capture now; the next capture will be started
  // at the end of an encode operation.
  if (pending_frames_ >= kMaxPendingFrames || capture_pending_) {
    did_skip_frame_ = true;
    return;
  }

  did_skip_frame_ = false;

  // At this point we are committed to performing one capture, so count it as
  // pending.
  pending_frames_++;
  DCHECK_LE(pending_frames_, kMaxPendingFrames);

  // Before doing a capture, schedule the next one.
  ScheduleNextCapture();

  capture_pending_ = true;

  // Capture the mouse shape.
  mouse_cursor_monitor_->Capture();

  // And finally perform one capture.
  capturer_->Capture(webrtc::DesktopRegion());
}

void VideoScheduler::FrameCaptureCompleted() {
  DCHECK(capture_task_runner_->BelongsToCurrentThread());

  // Decrement the pending capture count.
  pending_frames_--;
  DCHECK_GE(pending_frames_, 0);

  // If we skipped a frame capture because too many captures were pending,
  // schedule one now.
  if (did_skip_frame_)
    CaptureNextFrame();
}

// Network thread --------------------------------------------------------------

void VideoScheduler::SendVideoPacket(scoped_ptr<VideoPacket> packet) {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  if (!video_stub_)
    return;

  video_stub_->ProcessVideoPacket(
      packet.Pass(), base::Bind(&VideoScheduler::OnVideoPacketSent, this));
}

void VideoScheduler::OnVideoPacketSent() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  if (!video_stub_)
    return;

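  // A real packet has just gone out, so postpone the next keep-alive packet.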
  keep_alive_timer_->Reset();

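  // Tell the capture thread that this frame has been fully processed, so it
  // can decrement the pending-frame count and re-capture if a frame was
  // skipped.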
  capture_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::FrameCaptureCompleted, this));
}

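// Sends an empty video packet so the stream does not appear idle while
// capture is paused or blocked (see kKeepAlivePacketIntervalMs above).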
void VideoScheduler::SendKeepAlivePacket() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  if (!video_stub_)
    return;

  video_stub_->ProcessVideoPacket(
      scoped_ptr<VideoPacket>(new VideoPacket()),
      base::Bind(&VideoScheduler::OnKeepAlivePacketSent, this));
}

void VideoScheduler::OnKeepAlivePacketSent() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  if (keep_alive_timer_)
    keep_alive_timer_->Reset();
}

void VideoScheduler::SendCursorShape(
    scoped_ptr<protocol::CursorShapeInfo> cursor_shape) {
  DCHECK(network_task_runner_->BelongsToCurrentThread());

  if (!cursor_stub_)
    return;

  cursor_stub_->SetCursorShape(*cursor_shape);
}

// Encoder thread --------------------------------------------------------------

void VideoScheduler::EncodeFrame(
    scoped_ptr<webrtc::DesktopFrame> frame,
    int64 sequence_number,
    base::TimeTicks timestamp) {
  DCHECK(encode_task_runner_->BelongsToCurrentThread());

  // If there is nothing to encode then send an empty packet.
  if (!frame || frame->updated_region().is_empty()) {
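    // Return the frame to the capture thread for destruction, since it was
    // allocated there.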
    capture_task_runner_->DeleteSoon(FROM_HERE, frame.release());
    scoped_ptr<VideoPacket> packet(new VideoPacket());
    packet->set_client_sequence_number(sequence_number);
    network_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(
            &VideoScheduler::SendVideoPacket, this, base::Passed(&packet)));
    return;
  }

  scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame);
  packet->set_client_sequence_number(sequence_number);

  if (g_enable_timestamps) {
    packet->set_timestamp(timestamp.ToInternalValue());
  }

  // Destroy the frame before sending |packet| because SendVideoPacket() may
  // trigger another frame to be captured, and the screen capturer expects the
  // old frame to be freed by then.
  frame.reset();

  scheduler_.RecordEncodeTime(
      base::TimeDelta::FromMilliseconds(packet->encode_time_ms()));
  network_task_runner_->PostTask(
      FROM_HERE, base::Bind(&VideoScheduler::SendVideoPacket, this,
                            base::Passed(&packet)));
}

}  // namespace remoting