1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/renderer/media/android/webmediaplayer_android.h"
6
7 #include <limits>
8
9 #include "base/bind.h"
10 #include "base/command_line.h"
11 #include "base/files/file_path.h"
12 #include "base/logging.h"
13 #include "base/metrics/histogram.h"
14 #include "base/strings/string_number_conversions.h"
15 #include "cc/layers/video_layer.h"
16 #include "content/public/common/content_client.h"
17 #include "content/renderer/media/android/proxy_media_keys.h"
18 #include "content/renderer/media/android/renderer_demuxer_android.h"
19 #include "content/renderer/media/android/renderer_media_player_manager.h"
20 #include "content/renderer/media/crypto/key_systems.h"
21 #include "content/renderer/media/webmediaplayer_delegate.h"
22 #include "content/renderer/media/webmediaplayer_util.h"
23 #include "content/renderer/render_thread_impl.h"
24 #include "gpu/GLES2/gl2extchromium.h"
25 #include "grit/content_resources.h"
26 #include "media/base/android/media_player_android.h"
27 #include "media/base/bind_to_loop.h"
28 #include "media/base/media_switches.h"
29 #include "media/base/video_frame.h"
30 #include "net/base/mime_util.h"
31 #include "third_party/WebKit/public/platform/WebMediaPlayerClient.h"
32 #include "third_party/WebKit/public/platform/WebString.h"
33 #include "third_party/WebKit/public/web/WebDocument.h"
34 #include "third_party/WebKit/public/web/WebFrame.h"
35 #include "third_party/WebKit/public/web/WebRuntimeFeatures.h"
36 #include "third_party/WebKit/public/web/WebView.h"
37 #include "third_party/skia/include/core/SkBitmap.h"
38 #include "third_party/skia/include/core/SkCanvas.h"
39 #include "third_party/skia/include/core/SkPaint.h"
40 #include "ui/gfx/image/image.h"
41 #include "webkit/renderer/compositor_bindings/web_layer_impl.h"
42
43 #if defined(GOOGLE_TV)
44 #include "content/renderer/media/media_stream_audio_renderer.h"
45 #include "content/renderer/media/media_stream_client.h"
46 #endif
47
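// Value of GL_TEXTURE_EXTERNAL_OES (OES_EGL_image_external); used below as
// the texture target for the SurfaceTexture-backed stream texture.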
48 static const uint32 kGLTextureExternalOES = 0x8D65;
49
50 using blink::WebMediaPlayer;
51 using blink::WebSize;
52 using blink::WebString;
53 using blink::WebTimeRanges;
54 using blink::WebURL;
55 using media::MediaPlayerAndroid;
56 using media::VideoFrame;
57
58 namespace {
59 // Prefix for histograms related to Encrypted Media Extensions.
60 const char* kMediaEme = "Media.EME.";
61 } // namespace
62
63 namespace content {
64
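// Bounces the release of the remote playback texture to the main thread,
// where DoReleaseRemotePlaybackTexture() deletes it on the GL context.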
65 // static
66 void WebMediaPlayerAndroid::OnReleaseRemotePlaybackTexture(
67 const scoped_refptr<base::MessageLoopProxy>& main_loop,
68 const base::WeakPtr<WebMediaPlayerAndroid>& player,
69 uint32 sync_point) {
70 main_loop->PostTask(
71 FROM_HERE,
72 base::Bind(&WebMediaPlayerAndroid::DoReleaseRemotePlaybackTexture,
73 player,
74 sync_point));
75 }
76
77 WebMediaPlayerAndroid::WebMediaPlayerAndroid(
78 blink::WebFrame* frame,
79 blink::WebMediaPlayerClient* client,
80 base::WeakPtr<WebMediaPlayerDelegate> delegate,
81 RendererMediaPlayerManager* manager,
82 StreamTextureFactory* factory,
83 const scoped_refptr<base::MessageLoopProxy>& media_loop,
84 media::MediaLog* media_log)
85 : frame_(frame),
86 client_(client),
87 delegate_(delegate),
88 buffered_(1u),
89 main_loop_(base::MessageLoopProxy::current()),
90 media_loop_(media_loop),
91 ignore_metadata_duration_change_(false),
92 pending_seek_(false),
93 seeking_(false),
94 did_loading_progress_(false),
95 manager_(manager),
96 network_state_(WebMediaPlayer::NetworkStateEmpty),
97 ready_state_(WebMediaPlayer::ReadyStateHaveNothing),
98 remote_playback_texture_id_(0),
99 texture_id_(0),
100 texture_mailbox_sync_point_(0),
101 stream_id_(0),
102 is_playing_(false),
103 playing_started_(false),
104 needs_establish_peer_(true),
105 stream_texture_proxy_initialized_(false),
106 has_size_info_(false),
107 has_media_metadata_(false),
108 has_media_info_(false),
109 stream_texture_factory_(factory),
110 needs_external_surface_(false),
111 video_frame_provider_client_(NULL),
112 #if defined(GOOGLE_TV)
113 external_surface_threshold_(-1),
114 demuxer_(NULL),
115 media_stream_client_(NULL),
116 #endif // defined(GOOGLE_TV)
117 pending_playback_(false),
118 player_type_(MEDIA_PLAYER_TYPE_URL),
119 current_time_(0),
120 is_remote_(false),
121 media_log_(media_log),
122 weak_factory_(this) {
123 DCHECK(manager_);
124
125 DCHECK(main_thread_checker_.CalledOnValidThread());
126
127 // We want to be notified of |main_loop_| destruction.
128 base::MessageLoop::current()->AddDestructionObserver(this);
129
130 player_id_ = manager_->RegisterMediaPlayer(this);
131
132 #if defined(GOOGLE_TV)
133 if (CommandLine::ForCurrentProcess()->HasSwitch(
134 switches::kUseExternalVideoSurfaceThresholdInPixels)) {
135 if (!base::StringToInt(
136 CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
137 switches::kUseExternalVideoSurfaceThresholdInPixels),
138 &external_surface_threshold_)) {
139 external_surface_threshold_ = -1;
140 }
141 }
142 #endif // defined(GOOGLE_TV)
143
144 #if defined(VIDEO_HOLE)
145 // Defer stream texture creation until we are sure it's necessary.
146 needs_establish_peer_ = false;
147 current_frame_ = VideoFrame::CreateBlackFrame(gfx::Size(1, 1));
148 #endif // defined(VIDEO_HOLE)
149 TryCreateStreamTextureProxyIfNeeded();
150
151 if (blink::WebRuntimeFeatures::isPrefixedEncryptedMediaEnabled()) {
152     // TODO(xhwang): Report an error when there is an encrypted stream but EME
153     // is not enabled. Currently the player doesn't start and waits forever.
154 decryptor_.reset(new ProxyDecryptor(
155 #if defined(ENABLE_PEPPER_CDMS)
156 client,
157 frame,
158 #else
159 manager_,
160 player_id_, // TODO(xhwang): Use media_keys_id when MediaKeys are
161 // separated from WebMediaPlayer.
162 #endif // defined(ENABLE_PEPPER_CDMS)
163 // |decryptor_| is owned, so Unretained() is safe here.
164 base::Bind(&WebMediaPlayerAndroid::OnKeyAdded, base::Unretained(this)),
165 base::Bind(&WebMediaPlayerAndroid::OnKeyError, base::Unretained(this)),
166 base::Bind(&WebMediaPlayerAndroid::OnKeyMessage,
167 base::Unretained(this))));
168 }
169 }
170
171 WebMediaPlayerAndroid::~WebMediaPlayerAndroid() {
172 SetVideoFrameProviderClient(NULL);
173 client_->setWebLayer(NULL);
174
175 if (manager_) {
176 manager_->DestroyPlayer(player_id_);
177 manager_->UnregisterMediaPlayer(player_id_);
178 }
179
180 if (stream_id_)
181 stream_texture_factory_->DestroyStreamTexture(texture_id_);
182
183 if (remote_playback_texture_id_) {
184 blink::WebGraphicsContext3D* context =
185 stream_texture_factory_->Context3d();
186 if (context->makeContextCurrent())
187 context->deleteTexture(remote_playback_texture_id_);
188 }
189
190 if (base::MessageLoop::current())
191 base::MessageLoop::current()->RemoveDestructionObserver(this);
192
193 if (player_type_ == MEDIA_PLAYER_TYPE_MEDIA_SOURCE && delegate_)
194 delegate_->PlayerGone(this);
195
196 #if defined(GOOGLE_TV)
197 if (audio_renderer_) {
198 if (audio_renderer_->IsLocalRenderer()) {
199 audio_renderer_->Stop();
200 } else if (!paused()) {
201 // The |audio_renderer_| can be shared by multiple remote streams, and
202 // it will be stopped when WebRtcAudioDeviceImpl goes away. So we simply
203 // pause the |audio_renderer_| here to avoid re-creating the
204 // |audio_renderer_|.
205 audio_renderer_->Pause();
206 }
207 }
208 if (demuxer_ && !destroy_demuxer_cb_.is_null()) {
209 media_source_delegate_.reset();
210 destroy_demuxer_cb_.Run();
211 }
212 #endif
213 }
214
215 void WebMediaPlayerAndroid::load(LoadType load_type,
216 const blink::WebURL& url,
217 CORSMode cors_mode) {
218 switch (load_type) {
219 case LoadTypeURL:
220 player_type_ = MEDIA_PLAYER_TYPE_URL;
221 break;
222
223 case LoadTypeMediaSource:
224 player_type_ = MEDIA_PLAYER_TYPE_MEDIA_SOURCE;
225 break;
226
227 case LoadTypeMediaStream:
228 #if defined(GOOGLE_TV)
229 player_type_ = MEDIA_PLAYER_TYPE_MEDIA_STREAM;
230 break;
231 #else
232 CHECK(false) << "WebMediaPlayerAndroid doesn't support MediaStream on "
233 "this platform";
234 return;
235 #endif
236 }
237
238 has_media_metadata_ = false;
239 has_media_info_ = false;
240
241 media::SetDecryptorReadyCB set_decryptor_ready_cb;
242   if (decryptor_) { // |decryptor_| can be NULL if EME is not enabled.
243 set_decryptor_ready_cb = base::Bind(&ProxyDecryptor::SetDecryptorReadyCB,
244 base::Unretained(decryptor_.get()));
245 }
246
247 int demuxer_client_id = 0;
248 if (player_type_ != MEDIA_PLAYER_TYPE_URL) {
249 has_media_info_ = true;
250
251 RendererDemuxerAndroid* demuxer =
252 RenderThreadImpl::current()->renderer_demuxer();
253 demuxer_client_id = demuxer->GetNextDemuxerClientID();
254
255 media_source_delegate_.reset(new MediaSourceDelegate(
256 demuxer, demuxer_client_id, media_loop_, media_log_));
257
258 // |media_source_delegate_| is owned, so Unretained() is safe here.
259 if (player_type_ == MEDIA_PLAYER_TYPE_MEDIA_SOURCE) {
260 media_source_delegate_->InitializeMediaSource(
261 base::Bind(&WebMediaPlayerAndroid::OnMediaSourceOpened,
262 weak_factory_.GetWeakPtr()),
263 base::Bind(&WebMediaPlayerAndroid::OnNeedKey, base::Unretained(this)),
264 set_decryptor_ready_cb,
265 base::Bind(&WebMediaPlayerAndroid::UpdateNetworkState,
266 weak_factory_.GetWeakPtr()),
267 base::Bind(&WebMediaPlayerAndroid::OnDurationChanged,
268 weak_factory_.GetWeakPtr()));
269 }
270 #if defined(GOOGLE_TV)
271 // TODO(xhwang): Pass set_decryptor_ready_cb in InitializeMediaStream() to
272 // enable ClearKey support for Google TV.
273 if (player_type_ == MEDIA_PLAYER_TYPE_MEDIA_STREAM) {
274 media_source_delegate_->InitializeMediaStream(
275 demuxer_,
276 base::Bind(&WebMediaPlayerAndroid::UpdateNetworkState,
277 weak_factory_.GetWeakPtr()));
278 audio_renderer_ = media_stream_client_->GetAudioRenderer(url);
279 if (audio_renderer_)
280 audio_renderer_->Start();
281 }
282 #endif
283 } else {
284 info_loader_.reset(
285 new MediaInfoLoader(
286 url,
287 cors_mode,
288 base::Bind(&WebMediaPlayerAndroid::DidLoadMediaInfo,
289 base::Unretained(this))));
290 info_loader_->Start(frame_);
291 }
292
293 url_ = url;
294 GURL first_party_url = frame_->document().firstPartyForCookies();
295 manager_->Initialize(
296 player_type_, player_id_, url, first_party_url, demuxer_client_id);
297
298 if (manager_->ShouldEnterFullscreen(frame_))
299 manager_->EnterFullscreen(player_id_, frame_);
300
301 UpdateNetworkState(WebMediaPlayer::NetworkStateLoading);
302 UpdateReadyState(WebMediaPlayer::ReadyStateHaveNothing);
303 }
304
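// Completion callback for |info_loader_|, which load() only creates for
// MEDIA_PLAYER_TYPE_URL playback.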
305 void WebMediaPlayerAndroid::DidLoadMediaInfo(
306 MediaInfoLoader::Status status) {
307 DCHECK(!media_source_delegate_);
308 if (status == MediaInfoLoader::kFailed) {
309 info_loader_.reset();
310 UpdateNetworkState(WebMediaPlayer::NetworkStateNetworkError);
311 return;
312 }
313
314 has_media_info_ = true;
315 if (has_media_metadata_ &&
316 ready_state_ != WebMediaPlayer::ReadyStateHaveEnoughData) {
317 UpdateReadyState(WebMediaPlayer::ReadyStateHaveMetadata);
318 UpdateReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);
319 }
320   // Android doesn't start fetching resources until an implementation-defined
321   // event (e.g. a playback request) occurs. Set the network state to IDLE
322   // if play has not been requested yet.
323 if (!playing_started_)
324 UpdateNetworkState(WebMediaPlayer::NetworkStateIdle);
325 }
326
327 void WebMediaPlayerAndroid::play() {
328 #if defined(VIDEO_HOLE)
329 if (hasVideo() && needs_external_surface_ &&
330 !manager_->IsInFullscreen(frame_)) {
331 DCHECK(!needs_establish_peer_);
332 manager_->RequestExternalSurface(player_id_, last_computed_rect_);
333 }
334 #endif // defined(VIDEO_HOLE)
335 #if defined(GOOGLE_TV)
336 if (audio_renderer_ && paused())
337 audio_renderer_->Play();
338 #endif // defined(GOOGLE_TV)
339
340 TryCreateStreamTextureProxyIfNeeded();
341 if (hasVideo() && needs_establish_peer_)
342 EstablishSurfaceTexturePeer();
343
344 if (paused())
345 manager_->Start(player_id_);
346 UpdatePlayingState(true);
347 UpdateNetworkState(WebMediaPlayer::NetworkStateLoading);
348 playing_started_ = true;
349 }
350
351 void WebMediaPlayerAndroid::pause() {
352 pause(true);
353 }
354
355 void WebMediaPlayerAndroid::pause(bool is_media_related_action) {
356 #if defined(GOOGLE_TV)
357 if (audio_renderer_ && !paused())
358 audio_renderer_->Pause();
359 #endif
360 manager_->Pause(player_id_, is_media_related_action);
361 UpdatePlayingState(false);
362 }
363
364 void WebMediaPlayerAndroid::seek(double seconds) {
365 DCHECK(main_loop_->BelongsToCurrentThread());
366 DVLOG(1) << __FUNCTION__ << "(" << seconds << ")";
367
368 base::TimeDelta new_seek_time = ConvertSecondsToTimestamp(seconds);
369
370 if (seeking_) {
371 if (new_seek_time == seek_time_) {
372 if (media_source_delegate_) {
373 if (!pending_seek_) {
374 // If using media source demuxer, only suppress redundant seeks if
375 // there is no pending seek. This enforces that any pending seek that
376 // results in a demuxer seek is preceded by matching
377 // CancelPendingSeek() and StartWaitingForSeek() calls.
378 return;
379 }
380 } else {
381 // Suppress all redundant seeks if unrestricted by media source
382 // demuxer API.
383 pending_seek_ = false;
384 return;
385 }
386 }
387
388 pending_seek_ = true;
389 pending_seek_time_ = new_seek_time;
390
391 if (media_source_delegate_)
392 media_source_delegate_->CancelPendingSeek(pending_seek_time_);
393
394 // Later, OnSeekComplete will trigger the pending seek.
395 return;
396 }
397
398 seeking_ = true;
399 seek_time_ = new_seek_time;
400
401 if (media_source_delegate_)
402 media_source_delegate_->StartWaitingForSeek(seek_time_);
403
404 // Kick off the asynchronous seek!
405 manager_->Seek(player_id_, seek_time_);
406 }
407
408 bool WebMediaPlayerAndroid::supportsFullscreen() const {
409 return true;
410 }
411
412 bool WebMediaPlayerAndroid::supportsSave() const {
413 return false;
414 }
415
416 void WebMediaPlayerAndroid::setRate(double rate) {
417 NOTIMPLEMENTED();
418 }
419
420 void WebMediaPlayerAndroid::setVolume(double volume) {
421 manager_->SetVolume(player_id_, volume);
422 }
423
424 bool WebMediaPlayerAndroid::hasVideo() const {
425 // If we have obtained video size information before, use it.
426 if (has_size_info_)
427 return !natural_size_.isEmpty();
428
429 // TODO(qinmin): need a better method to determine whether the current media
430 // content contains video. Android does not provide any function to do
431 // this.
432 // We don't know whether the current media content has video unless
433 // the player is prepared. If the player is not prepared, we fall back
434 // to the mime-type. There may be no mime-type on a redirect URL.
435   // In that case, we conservatively assume it contains video so that the
436   // enterFullscreen() call will not fail.
437 if (!url_.has_path())
438 return false;
439 std::string mime;
440 if (!net::GetMimeTypeFromFile(base::FilePath(url_.path()), &mime))
441 return true;
442 return mime.find("audio/") == std::string::npos;
443 }
444
445 bool WebMediaPlayerAndroid::hasAudio() const {
446 // TODO(hclam): Query status of audio and return the actual value.
447 return true;
448 }
449
450 bool WebMediaPlayerAndroid::paused() const {
451 return !is_playing_;
452 }
453
454 bool WebMediaPlayerAndroid::seeking() const {
455 return seeking_;
456 }
457
458 double WebMediaPlayerAndroid::duration() const {
459 // HTML5 spec requires duration to be NaN if readyState is HAVE_NOTHING
460 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing)
461 return std::numeric_limits<double>::quiet_NaN();
462
463 // TODO(wolenetz): Correctly handle durations that MediaSourcePlayer
464 // considers unseekable, including kInfiniteDuration().
465 // See http://crbug.com/248396
466 return duration_.InSecondsF();
467 }
468
469 double WebMediaPlayerAndroid::currentTime() const {
470 // If the player is processing a seek, return the seek time.
471 // Blink may still query us if updatePlaybackState() occurs while seeking.
472 if (seeking()) {
473 return pending_seek_ ?
474 pending_seek_time_.InSecondsF() : seek_time_.InSecondsF();
475 }
476
477 return current_time_;
478 }
479
480 WebSize WebMediaPlayerAndroid::naturalSize() const {
481 return natural_size_;
482 }
483
484 WebMediaPlayer::NetworkState WebMediaPlayerAndroid::networkState() const {
485 return network_state_;
486 }
487
488 WebMediaPlayer::ReadyState WebMediaPlayerAndroid::readyState() const {
489 return ready_state_;
490 }
491
492 const WebTimeRanges& WebMediaPlayerAndroid::buffered() {
493 if (media_source_delegate_)
494 return media_source_delegate_->Buffered();
495 return buffered_;
496 }
497
498 double WebMediaPlayerAndroid::maxTimeSeekable() const {
499 // If we haven't even gotten to ReadyStateHaveMetadata yet then just
500 // return 0 so that the seekable range is empty.
501 if (ready_state_ < WebMediaPlayer::ReadyStateHaveMetadata)
502 return 0.0;
503
504 // TODO(hclam): If this stream is not seekable this should return 0.
505 return duration();
506 }
507
508 bool WebMediaPlayerAndroid::didLoadingProgress() const {
509 bool ret = did_loading_progress_;
510 did_loading_progress_ = false;
511 return ret;
512 }
513
514 void WebMediaPlayerAndroid::paint(blink::WebCanvas* canvas,
515 const blink::WebRect& rect,
516 unsigned char alpha) {
517 NOTIMPLEMENTED();
518 }
519
520 bool WebMediaPlayerAndroid::copyVideoTextureToPlatformTexture(
521 blink::WebGraphicsContext3D* web_graphics_context,
522 unsigned int texture,
523 unsigned int level,
524 unsigned int internal_format,
525 unsigned int type,
526 bool premultiply_alpha,
527 bool flip_y) {
528 // ---> FORK <----
529 return false;
530 // ---> END FORK <----
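  // Note: the early return above (part of this fork) disables copying the
  // video texture into the caller's texture, so the code below is currently
  // unreachable.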
531 if (is_remote_ || !texture_id_)
532 return false;
533
534   // For a hidden video element (with style "display:none"), ensure the
535   // texture size is set.
536 if (cached_stream_texture_size_.width != natural_size_.width ||
537 cached_stream_texture_size_.height != natural_size_.height) {
538 stream_texture_factory_->SetStreamTextureSize(
539 stream_id_, gfx::Size(natural_size_.width, natural_size_.height));
540 cached_stream_texture_size_ = natural_size_;
541 }
542
543   // Ensure the texture target is set before copyTextureCHROMIUM; otherwise
544   // an invalid texture target may be used for the texture copy.
545 web_graphics_context->bindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id_);
546
547 // The video is stored in an unmultiplied format, so premultiply if
548 // necessary.
549 web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
550 premultiply_alpha);
551
552   // The application itself needs to take care of setting the right flip_y
553   // value to get the expected result.
554   // flip_y == true means to reverse the video orientation, while
555   // flip_y == false means to keep the intrinsic orientation.
556 web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
557 web_graphics_context->copyTextureCHROMIUM(GL_TEXTURE_2D, texture_id_,
558 texture, level, internal_format,
559 type);
560 web_graphics_context->pixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, false);
561 web_graphics_context->pixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM,
562 false);
563
564 web_graphics_context->bindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
565 return true;
566 }
567
568 bool WebMediaPlayerAndroid::hasSingleSecurityOrigin() const {
569 if (info_loader_)
570 return info_loader_->HasSingleOrigin();
571 // The info loader may have failed.
572 if (player_type_ == MEDIA_PLAYER_TYPE_URL)
573 return false;
574 return true;
575 }
576
577 bool WebMediaPlayerAndroid::didPassCORSAccessCheck() const {
578 if (info_loader_)
579 return info_loader_->DidPassCORSAccessCheck();
580 return false;
581 }
582
583 double WebMediaPlayerAndroid::mediaTimeForTimeValue(double timeValue) const {
584 return ConvertSecondsToTimestamp(timeValue).InSecondsF();
585 }
586
587 unsigned WebMediaPlayerAndroid::decodedFrameCount() const {
588 if (media_source_delegate_)
589 return media_source_delegate_->DecodedFrameCount();
590 NOTIMPLEMENTED();
591 return 0;
592 }
593
594 unsigned WebMediaPlayerAndroid::droppedFrameCount() const {
595 if (media_source_delegate_)
596 return media_source_delegate_->DroppedFrameCount();
597 NOTIMPLEMENTED();
598 return 0;
599 }
600
601 unsigned WebMediaPlayerAndroid::audioDecodedByteCount() const {
602 if (media_source_delegate_)
603 return media_source_delegate_->AudioDecodedByteCount();
604 NOTIMPLEMENTED();
605 return 0;
606 }
607
608 unsigned WebMediaPlayerAndroid::videoDecodedByteCount() const {
609 if (media_source_delegate_)
610 return media_source_delegate_->VideoDecodedByteCount();
611 NOTIMPLEMENTED();
612 return 0;
613 }
614
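// Called when the underlying player reports its media metadata (duration and
// natural video size).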
615 void WebMediaPlayerAndroid::OnMediaMetadataChanged(
616 const base::TimeDelta& duration, int width, int height, bool success) {
617 bool need_to_signal_duration_changed = false;
618
619 if (url_.SchemeIs("file"))
620 UpdateNetworkState(WebMediaPlayer::NetworkStateLoaded);
621
622 // Update duration, if necessary, prior to ready state updates that may
623 // cause duration() query.
624 // TODO(wolenetz): Correctly handle durations that MediaSourcePlayer
625 // considers unseekable, including kInfiniteDuration().
626 // See http://crbug.com/248396
627 if (!ignore_metadata_duration_change_ && duration_ != duration) {
628 duration_ = duration;
629
630     // The client's readyState transition from HAVE_NOTHING to HAVE_METADATA
631     // already triggers a durationchanged event. If this is a different
632     // transition, remember to signal durationchanged.
633     // Never signal durationchanged on a metadata change in the MSE case,
634     // because OnDurationChanged() handles it.
635 if (ready_state_ > WebMediaPlayer::ReadyStateHaveNothing &&
636 player_type_ != MEDIA_PLAYER_TYPE_MEDIA_SOURCE) {
637 need_to_signal_duration_changed = true;
638 }
639 }
640
641 has_media_metadata_ = true;
642 if (has_media_info_ &&
643 ready_state_ != WebMediaPlayer::ReadyStateHaveEnoughData) {
644 UpdateReadyState(WebMediaPlayer::ReadyStateHaveMetadata);
645 UpdateReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);
646 }
647
648 // TODO(wolenetz): Should we just abort early and set network state to an
649 // error if success == false? See http://crbug.com/248399
650 if (success)
651 OnVideoSizeChanged(width, height);
652
653 if (hasVideo() && !video_weblayer_ && client_->needsWebLayerForVideo()) {
654 video_weblayer_.reset(
655 new webkit::WebLayerImpl(cc::VideoLayer::Create(this)));
656 client_->setWebLayer(video_weblayer_.get());
657 }
658
659 if (need_to_signal_duration_changed)
660 client_->durationChanged();
661 }
662
663 void WebMediaPlayerAndroid::OnPlaybackComplete() {
664   // When playback is about to finish, the Android media player often stops
665   // at a time slightly smaller than the duration. This means WebKit never
666   // learns that the playback has finished. To solve this, we set the current
667   // time to the media duration when OnPlaybackComplete() gets called.
668 OnTimeUpdate(duration_);
669 client_->timeChanged();
670
671   // If the loop attribute is set, timeChanged() will update the current time
672   // to 0 and perform a seek to 0. Because requests to the renderer process
673   // are sequential, OnSeekComplete() will only occur once OnPlaybackComplete()
674   // is done. Since playback can only resume upon completion of
675   // OnSeekComplete(), the play request needs to be saved.
676 is_playing_ = false;
677 if (seeking_ && seek_time_ == base::TimeDelta())
678 pending_playback_ = true;
679 }
680
681 void WebMediaPlayerAndroid::OnBufferingUpdate(int percentage) {
682 buffered_[0].end = duration() * percentage / 100;
683 did_loading_progress_ = true;
684 }
685
686 void WebMediaPlayerAndroid::OnSeekRequest(const base::TimeDelta& time_to_seek) {
687 DCHECK(main_loop_->BelongsToCurrentThread());
688 client_->requestSeek(time_to_seek.InSecondsF());
689 }
690
691 void WebMediaPlayerAndroid::OnSeekComplete(
692 const base::TimeDelta& current_time) {
693 DCHECK(main_loop_->BelongsToCurrentThread());
694 seeking_ = false;
695 if (pending_seek_) {
696 pending_seek_ = false;
697 seek(pending_seek_time_.InSecondsF());
698 return;
699 }
700
701 OnTimeUpdate(current_time);
702
703 UpdateReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);
704
705 client_->timeChanged();
706
707 if (pending_playback_) {
708 play();
709 pending_playback_ = false;
710 }
711 }
712
713 void WebMediaPlayerAndroid::OnMediaError(int error_type) {
714 switch (error_type) {
715 case MediaPlayerAndroid::MEDIA_ERROR_FORMAT:
716 UpdateNetworkState(WebMediaPlayer::NetworkStateFormatError);
717 break;
718 case MediaPlayerAndroid::MEDIA_ERROR_DECODE:
719 UpdateNetworkState(WebMediaPlayer::NetworkStateDecodeError);
720 break;
721 case MediaPlayerAndroid::MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
722 UpdateNetworkState(WebMediaPlayer::NetworkStateFormatError);
723 break;
724 case MediaPlayerAndroid::MEDIA_ERROR_INVALID_CODE:
725 break;
726 }
727 client_->repaint();
728 }
729
730 void WebMediaPlayerAndroid::OnVideoSizeChanged(int width, int height) {
731 has_size_info_ = true;
732 if (natural_size_.width == width && natural_size_.height == height)
733 return;
734
735 #if defined(VIDEO_HOLE)
736 bool has_surface_size_restriction = false;
737 #if defined(GOOGLE_TV)
738 has_surface_size_restriction = external_surface_threshold_ >= 0 &&
739 external_surface_threshold_ <= width * height;
740 #endif // defined(GOOGLE_TV)
741 // Use H/W surface for MSE as the content might be protected.
742 // TODO(qinmin): Change this so that only EME needs the H/W surface
743 if (media_source_delegate_ || has_surface_size_restriction) {
744 needs_external_surface_ = true;
745 if (!paused() && !manager_->IsInFullscreen(frame_))
746 manager_->RequestExternalSurface(player_id_, last_computed_rect_);
747 } else if (stream_texture_factory_ && !stream_id_) {
748     // Finally perform the deferred stream texture creation.
749 DoCreateStreamTexture();
750 if (paused()) {
751 SetNeedsEstablishPeer(true);
752 } else {
753 EstablishSurfaceTexturePeer();
754 }
755 }
756 #else
757 // When play() gets called, |natural_size_| may still be empty and
758 // EstablishSurfaceTexturePeer() will not get called. As a result, the video
759 // may play without a surface texture. When we finally get the valid video
760 // size here, we should call EstablishSurfaceTexturePeer() if it has not been
761 // previously called.
762 if (!paused() && needs_establish_peer_)
763 EstablishSurfaceTexturePeer();
764 #endif // defined(VIDEO_HOLE)
765
766 natural_size_.width = width;
767 natural_size_.height = height;
768 ReallocateVideoFrame();
769 }
770
771 void WebMediaPlayerAndroid::OnTimeUpdate(const base::TimeDelta& current_time) {
772 DCHECK(main_loop_->BelongsToCurrentThread());
773 current_time_ = current_time.InSecondsF();
774 }
775
776 void WebMediaPlayerAndroid::OnConnectedToRemoteDevice() {
777 DCHECK(main_thread_checker_.CalledOnValidThread());
778 DCHECK(!media_source_delegate_);
779 DrawRemotePlaybackIcon();
780 is_remote_ = true;
781 SetNeedsEstablishPeer(false);
782 }
783
784 void WebMediaPlayerAndroid::OnDisconnectedFromRemoteDevice() {
785 DCHECK(main_thread_checker_.CalledOnValidThread());
786 DCHECK(!media_source_delegate_);
787 SetNeedsEstablishPeer(true);
788 if (!paused())
789 EstablishSurfaceTexturePeer();
790 is_remote_ = false;
791 ReallocateVideoFrame();
792 }
793
794 void WebMediaPlayerAndroid::OnDidEnterFullscreen() {
795 if (!manager_->IsInFullscreen(frame_)) {
796 frame_->view()->willEnterFullScreen();
797 frame_->view()->didEnterFullScreen();
798 manager_->DidEnterFullscreen(frame_);
799 }
800 }
801
802 void WebMediaPlayerAndroid::OnDidExitFullscreen() {
803 // |needs_external_surface_| is always false on non-TV devices.
804 if (!needs_external_surface_)
805 SetNeedsEstablishPeer(true);
806 // We had the fullscreen surface connected to Android MediaPlayer,
807 // so reconnect our surface texture for embedded playback.
808 if (!paused() && needs_establish_peer_)
809 EstablishSurfaceTexturePeer();
810
811 #if defined(VIDEO_HOLE)
812 if (!paused() && needs_external_surface_)
813 manager_->RequestExternalSurface(player_id_, last_computed_rect_);
814 #endif // defined(VIDEO_HOLE)
815
816 frame_->view()->willExitFullScreen();
817 frame_->view()->didExitFullScreen();
818 manager_->DidExitFullscreen();
819 client_->repaint();
820 }
821
822 void WebMediaPlayerAndroid::OnMediaPlayerPlay() {
823 UpdatePlayingState(true);
824 client_->playbackStateChanged();
825 }
826
827 void WebMediaPlayerAndroid::OnMediaPlayerPause() {
828 UpdatePlayingState(false);
829 client_->playbackStateChanged();
830 }
831
832 void WebMediaPlayerAndroid::OnRequestFullscreen() {
833 client_->requestFullscreen();
834 }
835
836 void WebMediaPlayerAndroid::OnDurationChanged(const base::TimeDelta& duration) {
837 DCHECK(main_loop_->BelongsToCurrentThread());
838 // Only MSE |player_type_| registers this callback.
839 DCHECK_EQ(player_type_, MEDIA_PLAYER_TYPE_MEDIA_SOURCE);
840
841 // Cache the new duration value and trust it over any subsequent duration
842 // values received in OnMediaMetadataChanged().
843 // TODO(wolenetz): Correctly handle durations that MediaSourcePlayer
844 // considers unseekable, including kInfiniteDuration().
845 // See http://crbug.com/248396
846 duration_ = duration;
847 ignore_metadata_duration_change_ = true;
848
849 // Notify MediaPlayerClient that duration has changed, if > HAVE_NOTHING.
850 if (ready_state_ > WebMediaPlayer::ReadyStateHaveNothing)
851 client_->durationChanged();
852 }
853
854 void WebMediaPlayerAndroid::UpdateNetworkState(
855 WebMediaPlayer::NetworkState state) {
856 DCHECK(main_loop_->BelongsToCurrentThread());
857 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing &&
858 (state == WebMediaPlayer::NetworkStateNetworkError ||
859 state == WebMediaPlayer::NetworkStateDecodeError)) {
860 // Any error that occurs before reaching ReadyStateHaveMetadata should
861 // be considered a format error.
862 network_state_ = WebMediaPlayer::NetworkStateFormatError;
863 } else {
864 network_state_ = state;
865 }
866 client_->networkStateChanged();
867 }
868
869 void WebMediaPlayerAndroid::UpdateReadyState(
870 WebMediaPlayer::ReadyState state) {
871 ready_state_ = state;
872 client_->readyStateChanged();
873 }
874
875 void WebMediaPlayerAndroid::OnPlayerReleased() {
876 // |needs_external_surface_| is always false on non-TV devices.
877 if (!needs_external_surface_)
878 needs_establish_peer_ = true;
879
880 #if defined(VIDEO_HOLE)
881 last_computed_rect_ = gfx::RectF();
882 #endif // defined(VIDEO_HOLE)
883 }
884
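// Pauses playback (when the player is in a healthy network state) and asks
// the manager to release the underlying platform player's resources.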
885 void WebMediaPlayerAndroid::ReleaseMediaResources() {
886 switch (network_state_) {
887     // Pause the media player and inform WebKit if the player is in good
888     // shape.
889 case WebMediaPlayer::NetworkStateIdle:
890 case WebMediaPlayer::NetworkStateLoading:
891 case WebMediaPlayer::NetworkStateLoaded:
892 pause(false);
893 client_->playbackStateChanged();
894 break;
895     // If a WebMediaPlayer instance has entered one of these states, the
896     // internal network state in HTMLMediaElement could be set to empty, and
897     // calling playbackStateChanged() could get this object deleted.
898 case WebMediaPlayer::NetworkStateEmpty:
899 case WebMediaPlayer::NetworkStateFormatError:
900 case WebMediaPlayer::NetworkStateNetworkError:
901 case WebMediaPlayer::NetworkStateDecodeError:
902 break;
903 }
904 manager_->ReleaseResources(player_id_);
905 OnPlayerReleased();
906 }
907
908 void WebMediaPlayerAndroid::WillDestroyCurrentMessageLoop() {
909 if (manager_)
910 manager_->UnregisterMediaPlayer(player_id_);
911 Detach();
912 }
913
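// Drops references to the stream texture, media source delegate, current
// frame and manager; called when the render thread's message loop is being
// destroyed.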
914 void WebMediaPlayerAndroid::Detach() {
915 if (stream_id_) {
916 stream_texture_factory_->DestroyStreamTexture(texture_id_);
917 stream_id_ = 0;
918 }
919
920 media_source_delegate_.reset();
921 {
922 base::AutoLock auto_lock(current_frame_lock_);
923 current_frame_ = NULL;
924 }
925 is_remote_ = false;
926 manager_ = NULL;
927 }
928
929 void WebMediaPlayerAndroid::DrawRemotePlaybackIcon() {
930 DCHECK(main_thread_checker_.CalledOnValidThread());
931 if (!video_weblayer_)
932 return;
933 blink::WebGraphicsContext3D* context = stream_texture_factory_->Context3d();
934 if (!context->makeContextCurrent())
935 return;
936
937   // TODO(johnme): Should redraw this frame if the layer bounds change; but
938   // there seems to be no easy way to listen for the layer resizing (as
939   // opposed to OnVideoSizeChanged, which fires when the frame size of the
940   // video file changes). Perhaps we have to poll (on the main thread)?
941 gfx::Size video_size_css_px = video_weblayer_->bounds();
942 float device_scale_factor = frame_->view()->deviceScaleFactor();
943 // canvas_size will be the size in device pixels when pageScaleFactor == 1
944 gfx::Size canvas_size(
945 static_cast<int>(video_size_css_px.width() * device_scale_factor),
946 static_cast<int>(video_size_css_px.height() * device_scale_factor));
947
948 SkBitmap bitmap;
949 bitmap.setConfig(
950 SkBitmap::kARGB_8888_Config, canvas_size.width(), canvas_size.height());
951 bitmap.allocPixels();
952
953 SkCanvas canvas(bitmap);
954 canvas.drawColor(SK_ColorBLACK);
955 SkPaint paint;
956 paint.setAntiAlias(true);
957 paint.setFilterLevel(SkPaint::kHigh_FilterLevel);
958 const SkBitmap* icon_bitmap =
959 content::GetContentClient()
960 ->GetNativeImageNamed(IDR_MEDIAPLAYER_REMOTE_PLAYBACK_ICON)
961 .ToSkBitmap();
962 // In order to get a reasonable margin around the icon:
963 // - the icon should be under half the frame width
964 // - the icon should be at most 3/5 of the frame height
965 // Additionally, on very large screens, the icon size should be capped. A max
966 // width of 320 was arbitrarily chosen; since this is half the resource's
967 // pixel width, it should look crisp even on 2x deviceScaleFactor displays.
968 int icon_width = 320;
969 icon_width = std::min(icon_width, canvas_size.width() / 2);
970 icon_width = std::min(icon_width,
971 canvas_size.height() * icon_bitmap->width() /
972 icon_bitmap->height() * 3 / 5);
973 int icon_height = icon_width * icon_bitmap->height() / icon_bitmap->width();
974 // Center the icon within the frame
975 SkRect icon_rect = SkRect::MakeXYWH((canvas_size.width() - icon_width) / 2,
976 (canvas_size.height() - icon_height) / 2,
977 icon_width,
978 icon_height);
979 canvas.drawBitmapRectToRect(
980 *icon_bitmap, NULL /* src */, icon_rect /* dest */, &paint);
981
982 if (!remote_playback_texture_id_)
983 remote_playback_texture_id_ = context->createTexture();
984 unsigned texture_target = GL_TEXTURE_2D;
985 context->bindTexture(texture_target, remote_playback_texture_id_);
986 context->texParameteri(texture_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
987 context->texParameteri(texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
988 context->texParameteri(texture_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
989 context->texParameteri(texture_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
990
991 {
992 SkAutoLockPixels lock(bitmap);
993 context->texImage2D(texture_target,
994 0 /* level */,
995 GL_RGBA /* internalformat */,
996 bitmap.width(),
997 bitmap.height(),
998 0 /* border */,
999 GL_RGBA /* format */,
1000 GL_UNSIGNED_BYTE /* type */,
1001 bitmap.getPixels());
1002 }
1003
1004 gpu::Mailbox texture_mailbox;
1005 context->genMailboxCHROMIUM(texture_mailbox.name);
1006 context->produceTextureCHROMIUM(texture_target, texture_mailbox.name);
1007 context->flush();
1008 unsigned texture_mailbox_sync_point = context->insertSyncPoint();
1009
1010 scoped_refptr<VideoFrame> new_frame = VideoFrame::WrapNativeTexture(
1011 make_scoped_ptr(new VideoFrame::MailboxHolder(
1012 texture_mailbox,
1013 texture_mailbox_sync_point,
1014 base::Bind(&WebMediaPlayerAndroid::OnReleaseRemotePlaybackTexture,
1015 main_loop_,
1016 weak_factory_.GetWeakPtr()))),
1017 texture_target,
1018 canvas_size /* coded_size */,
1019 gfx::Rect(canvas_size) /* visible_rect */,
1020 canvas_size /* natural_size */,
1021 base::TimeDelta() /* timestamp */,
1022 VideoFrame::ReadPixelsCB(),
1023 base::Closure() /* no_longer_needed_cb */);
1024 SetCurrentFrameInternal(new_frame);
1025 }
1026
1027 void WebMediaPlayerAndroid::ReallocateVideoFrame() {
1028 if (needs_external_surface_) {
1029 // VideoFrame::CreateHoleFrame is only defined under VIDEO_HOLE.
1030 #if defined(VIDEO_HOLE)
1031 if (!natural_size_.isEmpty()) {
1032 scoped_refptr<VideoFrame> new_frame =
1033 VideoFrame::CreateHoleFrame(natural_size_);
1034 SetCurrentFrameInternal(new_frame);
1035 // Force the client to grab the hole frame.
1036 client_->repaint();
1037 }
1038 #else
1039 NOTIMPLEMENTED() << "Hole punching not supported without VIDEO_HOLE flag";
1040 #endif // defined(VIDEO_HOLE)
1041 } else if (!is_remote_ && texture_id_) {
1042 scoped_refptr<VideoFrame> new_frame = VideoFrame::WrapNativeTexture(
1043 make_scoped_ptr(new VideoFrame::MailboxHolder(
1044 texture_mailbox_,
1045 texture_mailbox_sync_point_,
1046 VideoFrame::MailboxHolder::TextureNoLongerNeededCallback())),
1047 kGLTextureExternalOES,
1048 natural_size_,
1049 gfx::Rect(natural_size_),
1050 natural_size_,
1051 base::TimeDelta(),
1052 VideoFrame::ReadPixelsCB(),
1053 base::Closure());
1054 SetCurrentFrameInternal(new_frame);
1055 }
1056 }
1057
1058 void WebMediaPlayerAndroid::SetVideoFrameProviderClient(
1059 cc::VideoFrameProvider::Client* client) {
1060 // This is called from both the main renderer thread and the compositor
1061 // thread (when the main thread is blocked).
1062 if (video_frame_provider_client_)
1063 video_frame_provider_client_->StopUsingProvider();
1064 video_frame_provider_client_ = client;
1065
1066 // Set the callback target when a frame is produced.
1067 if (stream_texture_proxy_)
1068 stream_texture_proxy_->SetClient(client);
1069 }
1070
1071 void WebMediaPlayerAndroid::SetCurrentFrameInternal(
1072 scoped_refptr<media::VideoFrame>& video_frame) {
1073 base::AutoLock auto_lock(current_frame_lock_);
1074 current_frame_ = video_frame;
1075 }
1076
1077 scoped_refptr<media::VideoFrame> WebMediaPlayerAndroid::GetCurrentFrame() {
1078 scoped_refptr<VideoFrame> video_frame;
1079 {
1080 base::AutoLock auto_lock(current_frame_lock_);
1081 video_frame = current_frame_;
1082 }
1083
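  // Lazily bind the stream texture proxy and set its size the first time a
  // frame is pulled for local (non-remote, non-external-surface) playback.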
1084 if (!stream_texture_proxy_initialized_ && stream_texture_proxy_ &&
1085 stream_id_ && !needs_external_surface_ && !is_remote_) {
1086 gfx::Size natural_size = video_frame->natural_size();
1087 // TODO(sievers): These variables are accessed on the wrong thread here.
1088 stream_texture_proxy_->BindToCurrentThread(stream_id_);
1089 stream_texture_factory_->SetStreamTextureSize(stream_id_, natural_size);
1090 stream_texture_proxy_initialized_ = true;
1091 cached_stream_texture_size_ = natural_size;
1092 }
1093
1094 return video_frame;
1095 }
1096
1097 void WebMediaPlayerAndroid::PutCurrentFrame(
1098 const scoped_refptr<media::VideoFrame>& frame) {
1099 }
1100
1101 void WebMediaPlayerAndroid::TryCreateStreamTextureProxyIfNeeded() {
1102 // Already created.
1103 if (stream_texture_proxy_)
1104 return;
1105
1106 // No factory to create proxy.
1107 if (!stream_texture_factory_)
1108 return;
1109
1110 stream_texture_proxy_.reset(stream_texture_factory_->CreateProxy());
1111 if (needs_establish_peer_ && stream_texture_proxy_) {
1112 DoCreateStreamTexture();
1113 ReallocateVideoFrame();
1114 }
1115
1116 if (stream_texture_proxy_ && video_frame_provider_client_)
1117 stream_texture_proxy_->SetClient(video_frame_provider_client_);
1118 }
1119
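// Connects the stream texture to the browser-side player so that decoded
// frames are delivered to our SurfaceTexture. For media source playback the
// stream texture is recreated first, since MediaCodec releases the old
// surface when it goes away.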
1120 void WebMediaPlayerAndroid::EstablishSurfaceTexturePeer() {
1121 if (!stream_texture_proxy_)
1122 return;
1123
1124 if (media_source_delegate_ && stream_texture_factory_) {
1125     // MediaCodec will release the old surface when it goes away, so we need
1126     // to create a new one each time this is called.
1127 stream_texture_factory_->DestroyStreamTexture(texture_id_);
1128 stream_id_ = 0;
1129 texture_id_ = 0;
1130 texture_mailbox_ = gpu::Mailbox();
1131 texture_mailbox_sync_point_ = 0;
1132 DoCreateStreamTexture();
1133 ReallocateVideoFrame();
1134 stream_texture_proxy_initialized_ = false;
1135 }
1136 if (stream_texture_factory_.get() && stream_id_)
1137 stream_texture_factory_->EstablishPeer(stream_id_, player_id_);
1138 needs_establish_peer_ = false;
1139 }
1140
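// Creates the stream texture, filling in |stream_id_|, |texture_id_|,
// |texture_mailbox_| and |texture_mailbox_sync_point_| for later use in
// ReallocateVideoFrame().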
1141 void WebMediaPlayerAndroid::DoCreateStreamTexture() {
1142 DCHECK(!stream_id_);
1143 DCHECK(!texture_id_);
1144 DCHECK(!texture_mailbox_sync_point_);
1145 stream_id_ = stream_texture_factory_->CreateStreamTexture(
1146 kGLTextureExternalOES,
1147 &texture_id_,
1148 &texture_mailbox_,
1149 &texture_mailbox_sync_point_);
1150 }
1151
1152 void WebMediaPlayerAndroid::SetNeedsEstablishPeer(bool needs_establish_peer) {
1153 needs_establish_peer_ = needs_establish_peer;
1154 }
1155
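// Records the new playing state and notifies |delegate_|, if any, of the
// play/pause transition.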
1156 void WebMediaPlayerAndroid::UpdatePlayingState(bool is_playing) {
1157 is_playing_ = is_playing;
1158 if (!delegate_)
1159 return;
1160 if (is_playing)
1161 delegate_->DidPlay(this);
1162 else
1163 delegate_->DidPause(this);
1164 }
1165
1166 #if defined(VIDEO_HOLE)
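// Computes the video layer's rect by accumulating layer offsets up the tree;
// the result is cached in |last_computed_rect_| and used to position the
// external (hole-punched) video surface. Returns true only if it changed.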
1167 bool WebMediaPlayerAndroid::RetrieveGeometryChange(gfx::RectF* rect) {
1168 if (!video_weblayer_)
1169 return false;
1170
1171 // Compute the geometry of video frame layer.
1172 cc::Layer* layer = video_weblayer_->layer();
1173 rect->set_size(layer->bounds());
1174 while (layer) {
1175 rect->Offset(layer->position().OffsetFromOrigin());
1176 layer = layer->parent();
1177 }
1178
1179   // Return false when the geometry hasn't changed since the last time.
1180 if (last_computed_rect_ == *rect)
1181 return false;
1182
1183   // Store the geometry information only when it has actually changed.
1184 last_computed_rect_ = *rect;
1185 return true;
1186 }
1187 #endif
1188
1189 // The following EME related code is copied from WebMediaPlayerImpl.
1190 // TODO(xhwang): Remove duplicate code between WebMediaPlayerAndroid and
1191 // WebMediaPlayerImpl.
1192 // TODO(kjyoun): Update Google TV EME implementation to use IPC.
1193
1194 // Helper functions to report media EME related stats to UMA. They follow the
1195 // convention of more commonly used macros UMA_HISTOGRAM_ENUMERATION and
1196 // UMA_HISTOGRAM_COUNTS. The reason that we cannot use those macros directly is
1197 // that UMA_* macros require the names to be constant throughout the process'
1198 // lifetime.
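// The resulting histogram names have the form
// "Media.EME.<KeySystemNameForUMA>.<method>".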
1199 static void EmeUMAHistogramEnumeration(const blink::WebString& key_system,
1200 const std::string& method,
1201 int sample,
1202 int boundary_value) {
1203 base::LinearHistogram::FactoryGet(
1204 kMediaEme + KeySystemNameForUMA(key_system) + "." + method,
1205 1, boundary_value, boundary_value + 1,
1206 base::Histogram::kUmaTargetedHistogramFlag)->Add(sample);
1207 }
1208
1209 static void EmeUMAHistogramCounts(const blink::WebString& key_system,
1210 const std::string& method,
1211 int sample) {
1212 // Use the same parameters as UMA_HISTOGRAM_COUNTS.
1213 base::Histogram::FactoryGet(
1214 kMediaEme + KeySystemNameForUMA(key_system) + "." + method,
1215 1, 1000000, 50, base::Histogram::kUmaTargetedHistogramFlag)->Add(sample);
1216 }
1217
1218 // Helper enum for reporting generateKeyRequest/addKey histograms.
1219 enum MediaKeyException {
1220 kUnknownResultId,
1221 kSuccess,
1222 kKeySystemNotSupported,
1223 kInvalidPlayerState,
1224 kMaxMediaKeyException
1225 };
1226
1227 static MediaKeyException MediaKeyExceptionForUMA(
1228 WebMediaPlayer::MediaKeyException e) {
1229 switch (e) {
1230 case WebMediaPlayer::MediaKeyExceptionKeySystemNotSupported:
1231 return kKeySystemNotSupported;
1232 case WebMediaPlayer::MediaKeyExceptionInvalidPlayerState:
1233 return kInvalidPlayerState;
1234 case WebMediaPlayer::MediaKeyExceptionNoError:
1235 return kSuccess;
1236 default:
1237 return kUnknownResultId;
1238 }
1239 }
1240
1241 // Helper for converting |key_system| name and exception |e| to a pair of enum
1242 // values from above, for reporting to UMA.
1243 static void ReportMediaKeyExceptionToUMA(
1244 const std::string& method,
1245 const WebString& key_system,
1246 WebMediaPlayer::MediaKeyException e) {
1247 MediaKeyException result_id = MediaKeyExceptionForUMA(e);
1248 DCHECK_NE(result_id, kUnknownResultId) << e;
1249 EmeUMAHistogramEnumeration(
1250 key_system, method, result_id, kMaxMediaKeyException);
1251 }
1252
1253 WebMediaPlayer::MediaKeyException WebMediaPlayerAndroid::generateKeyRequest(
1254 const WebString& key_system,
1255 const unsigned char* init_data,
1256 unsigned init_data_length) {
1257 WebMediaPlayer::MediaKeyException e =
1258 GenerateKeyRequestInternal(key_system, init_data, init_data_length);
1259 ReportMediaKeyExceptionToUMA("generateKeyRequest", key_system, e);
1260 return e;
1261 }
1262
1263 bool WebMediaPlayerAndroid::IsKeySystemSupported(const WebString& key_system) {
1264 // On Android, EME only works with MSE.
1265 return player_type_ == MEDIA_PLAYER_TYPE_MEDIA_SOURCE &&
1266 IsConcreteSupportedKeySystem(key_system);
1267 }
1268
1269 WebMediaPlayer::MediaKeyException
1270 WebMediaPlayerAndroid::GenerateKeyRequestInternal(
1271 const WebString& key_system,
1272 const unsigned char* init_data,
1273 unsigned init_data_length) {
1274 DVLOG(1) << "generateKeyRequest: " << key_system.utf8().data() << ": "
1275 << std::string(reinterpret_cast<const char*>(init_data),
1276 static_cast<size_t>(init_data_length));
1277
1278 if (!IsKeySystemSupported(key_system))
1279 return WebMediaPlayer::MediaKeyExceptionKeySystemNotSupported;
1280
1281 // We do not support run-time switching between key systems for now.
1282 if (current_key_system_.isEmpty()) {
1283 if (!decryptor_->InitializeCDM(key_system.utf8(), frame_->document().url()))
1284 return WebMediaPlayer::MediaKeyExceptionKeySystemNotSupported;
1285 current_key_system_ = key_system;
1286 } else if (key_system != current_key_system_) {
1287 return WebMediaPlayer::MediaKeyExceptionInvalidPlayerState;
1288 }
1289
1290 // TODO(xhwang): We assume all streams are from the same container (thus have
1291 // the same "type") for now. In the future, the "type" should be passed down
1292 // from the application.
1293 if (!decryptor_->GenerateKeyRequest(init_data_type_,
1294 init_data, init_data_length)) {
1295 current_key_system_.reset();
1296 return WebMediaPlayer::MediaKeyExceptionKeySystemNotSupported;
1297 }
1298
1299 return WebMediaPlayer::MediaKeyExceptionNoError;
1300 }
1301
1302 WebMediaPlayer::MediaKeyException WebMediaPlayerAndroid::addKey(
1303 const WebString& key_system,
1304 const unsigned char* key,
1305 unsigned key_length,
1306 const unsigned char* init_data,
1307 unsigned init_data_length,
1308 const WebString& session_id) {
1309 WebMediaPlayer::MediaKeyException e = AddKeyInternal(
1310 key_system, key, key_length, init_data, init_data_length, session_id);
1311 ReportMediaKeyExceptionToUMA("addKey", key_system, e);
1312 return e;
1313 }
1314
1315 WebMediaPlayer::MediaKeyException WebMediaPlayerAndroid::AddKeyInternal(
1316 const WebString& key_system,
1317 const unsigned char* key,
1318 unsigned key_length,
1319 const unsigned char* init_data,
1320 unsigned init_data_length,
1321 const WebString& session_id) {
1322 DCHECK(key);
1323 DCHECK_GT(key_length, 0u);
1324 DVLOG(1) << "addKey: " << key_system.utf8().data() << ": "
1325 << std::string(reinterpret_cast<const char*>(key),
1326 static_cast<size_t>(key_length)) << ", "
1327 << std::string(reinterpret_cast<const char*>(init_data),
1328 static_cast<size_t>(init_data_length))
1329 << " [" << session_id.utf8().data() << "]";
1330
1331 if (!IsKeySystemSupported(key_system))
1332 return WebMediaPlayer::MediaKeyExceptionKeySystemNotSupported;
1333
1334 if (current_key_system_.isEmpty() || key_system != current_key_system_)
1335 return WebMediaPlayer::MediaKeyExceptionInvalidPlayerState;
1336
1337 decryptor_->AddKey(key, key_length, init_data, init_data_length,
1338 session_id.utf8());
1339 return WebMediaPlayer::MediaKeyExceptionNoError;
1340 }
1341
1342 WebMediaPlayer::MediaKeyException WebMediaPlayerAndroid::cancelKeyRequest(
1343 const WebString& key_system,
1344 const WebString& session_id) {
1345 WebMediaPlayer::MediaKeyException e =
1346 CancelKeyRequestInternal(key_system, session_id);
1347 ReportMediaKeyExceptionToUMA("cancelKeyRequest", key_system, e);
1348 return e;
1349 }
1350
1351 WebMediaPlayer::MediaKeyException
1352 WebMediaPlayerAndroid::CancelKeyRequestInternal(
1353 const WebString& key_system,
1354 const WebString& session_id) {
1355 if (!IsKeySystemSupported(key_system))
1356 return WebMediaPlayer::MediaKeyExceptionKeySystemNotSupported;
1357
1358 if (current_key_system_.isEmpty() || key_system != current_key_system_)
1359 return WebMediaPlayer::MediaKeyExceptionInvalidPlayerState;
1360
1361 decryptor_->CancelKeyRequest(session_id.utf8());
1362 return WebMediaPlayer::MediaKeyExceptionNoError;
1363 }
1364
1365 void WebMediaPlayerAndroid::OnKeyAdded(const std::string& session_id) {
1366 EmeUMAHistogramCounts(current_key_system_, "KeyAdded", 1);
1367
1368 #if defined(GOOGLE_TV)
1369 if (media_source_delegate_)
1370 media_source_delegate_->NotifyKeyAdded(current_key_system_.utf8());
1371 #endif // defined(GOOGLE_TV)
1372
1373 client_->keyAdded(current_key_system_, WebString::fromUTF8(session_id));
1374 }
1375
1376 void WebMediaPlayerAndroid::OnKeyError(const std::string& session_id,
1377 media::MediaKeys::KeyError error_code,
1378 int system_code) {
1379 EmeUMAHistogramEnumeration(current_key_system_, "KeyError",
1380 error_code, media::MediaKeys::kMaxKeyError);
1381
1382 client_->keyError(
1383 current_key_system_,
1384 WebString::fromUTF8(session_id),
1385 static_cast<blink::WebMediaPlayerClient::MediaKeyErrorCode>(error_code),
1386 system_code);
1387 }
1388
1389 void WebMediaPlayerAndroid::OnKeyMessage(const std::string& session_id,
1390 const std::vector<uint8>& message,
1391 const std::string& destination_url) {
1392 const GURL destination_url_gurl(destination_url);
1393 DLOG_IF(WARNING, !destination_url.empty() && !destination_url_gurl.is_valid())
1394 << "Invalid URL in destination_url: " << destination_url;
1395
1396 client_->keyMessage(current_key_system_,
1397 WebString::fromUTF8(session_id),
1398 message.empty() ? NULL : &message[0],
1399 message.size(),
1400 destination_url_gurl);
1401 }
1402
1403 void WebMediaPlayerAndroid::OnMediaSourceOpened(
1404 blink::WebMediaSource* web_media_source) {
1405 client_->mediaSourceOpened(web_media_source);
1406 }
1407
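// Invoked by |media_source_delegate_| when encrypted stream data is
// encountered and a decryption key is needed.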
1408 void WebMediaPlayerAndroid::OnNeedKey(const std::string& type,
1409 const std::vector<uint8>& init_data) {
1410 DCHECK(main_loop_->BelongsToCurrentThread());
1411 // Do not fire NeedKey event if encrypted media is not enabled.
1412 if (!blink::WebRuntimeFeatures::isEncryptedMediaEnabled() &&
1413 !blink::WebRuntimeFeatures::isPrefixedEncryptedMediaEnabled()) {
1414 return;
1415 }
1416
1417 UMA_HISTOGRAM_COUNTS(kMediaEme + std::string("NeedKey"), 1);
1418
1419 DCHECK(init_data_type_.empty() || type.empty() || type == init_data_type_);
1420 if (init_data_type_.empty())
1421 init_data_type_ = type;
1422
1423 const uint8* init_data_ptr = init_data.empty() ? NULL : &init_data[0];
1424 // TODO(xhwang): Drop |keySystem| and |sessionId| in keyNeeded() call.
1425 client_->keyNeeded(WebString(),
1426 WebString(),
1427 init_data_ptr,
1428 init_data.size());
1429 }
1430
1431 #if defined(GOOGLE_TV)
1432 bool WebMediaPlayerAndroid::InjectMediaStream(
1433 MediaStreamClient* media_stream_client,
1434 media::Demuxer* demuxer,
1435 const base::Closure& destroy_demuxer_cb) {
1436   DCHECK(!demuxer_);
1437 media_stream_client_ = media_stream_client;
1438 demuxer_ = demuxer;
1439 destroy_demuxer_cb_ = destroy_demuxer_cb;
1440 return true;
1441 }
1442 #endif
1443
1444 void WebMediaPlayerAndroid::DoReleaseRemotePlaybackTexture(uint32 sync_point) {
1445 DCHECK(main_thread_checker_.CalledOnValidThread());
1446 DCHECK(remote_playback_texture_id_);
1447
1448 blink::WebGraphicsContext3D* context =
1449 stream_texture_factory_->Context3d();
1450
1451 if (sync_point)
1452 context->waitSyncPoint(sync_point);
1453 context->deleteTexture(remote_playback_texture_id_);
1454 remote_playback_texture_id_ = 0;
1455 }
1456
1457 void WebMediaPlayerAndroid::enterFullscreen() {
1458 if (manager_->CanEnterFullscreen(frame_)) {
1459 manager_->EnterFullscreen(player_id_, frame_);
1460 SetNeedsEstablishPeer(false);
1461 }
1462 }
1463
1464 void WebMediaPlayerAndroid::exitFullscreen() {
1465 manager_->ExitFullscreen(player_id_);
1466 }
1467
1468 bool WebMediaPlayerAndroid::canEnterFullscreen() const {
1469 return manager_->CanEnterFullscreen(frame_);
1470 }
1471
1472 } // namespace content
1473