OLD | NEW |
---|---|
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/blink/webmediaplayer_impl.h" | 5 #include "media/blink/webmediaplayer_impl.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <cmath> | 8 #include <cmath> |
9 #include <limits> | 9 #include <limits> |
10 #include <string> | 10 #include <string> |
(...skipping 41 matching lines...) | |
52 #include "third_party/WebKit/public/platform/WebSize.h" | 52 #include "third_party/WebKit/public/platform/WebSize.h" |
53 #include "third_party/WebKit/public/platform/WebString.h" | 53 #include "third_party/WebKit/public/platform/WebString.h" |
54 #include "third_party/WebKit/public/platform/WebURL.h" | 54 #include "third_party/WebKit/public/platform/WebURL.h" |
55 #include "third_party/WebKit/public/web/WebDocument.h" | 55 #include "third_party/WebKit/public/web/WebDocument.h" |
56 #include "third_party/WebKit/public/web/WebFrame.h" | 56 #include "third_party/WebKit/public/web/WebFrame.h" |
57 #include "third_party/WebKit/public/web/WebLocalFrame.h" | 57 #include "third_party/WebKit/public/web/WebLocalFrame.h" |
58 #include "third_party/WebKit/public/web/WebRuntimeFeatures.h" | 58 #include "third_party/WebKit/public/web/WebRuntimeFeatures.h" |
59 #include "third_party/WebKit/public/web/WebSecurityOrigin.h" | 59 #include "third_party/WebKit/public/web/WebSecurityOrigin.h" |
60 #include "third_party/WebKit/public/web/WebView.h" | 60 #include "third_party/WebKit/public/web/WebView.h" |
61 | 61 |
62 #if defined(WIMPI_CAST) | |
63 #include "gpu/GLES2/gl2extchromium.h" | |
64 #include "gpu/command_buffer/client/gles2_interface.h" | |
65 #include "media/base/android/media_common_android.h" | |
66 #include "third_party/skia/include/core/SkCanvas.h" | |
67 #include "third_party/skia/include/core/SkPaint.h" | |
68 #include "third_party/skia/include/core/SkTypeface.h" | |
69 #include "third_party/skia/include/gpu/GrContext.h" | |
70 #include "third_party/skia/include/gpu/SkGrPixelRef.h" | |
71 #endif | |
72 | |
62 using blink::WebCanvas; | 73 using blink::WebCanvas; |
63 using blink::WebMediaPlayer; | 74 using blink::WebMediaPlayer; |
64 using blink::WebRect; | 75 using blink::WebRect; |
65 using blink::WebSize; | 76 using blink::WebSize; |
66 using blink::WebString; | 77 using blink::WebString; |
78 using gpu::gles2::GLES2Interface; | |
79 | |
80 namespace media { | |
67 | 81 |
68 namespace { | 82 namespace { |
69 | 83 |
70 // Limits the range of playback rate. | 84 // Limits the range of playback rate. |
71 // | 85 // |
72 // TODO(kylep): Revisit these. | 86 // TODO(kylep): Revisit these. |
73 // | 87 // |
74 // Vista has substantially lower performance than XP or Windows7. If you speed | 88 // Vista has substantially lower performance than XP or Windows7. If you speed |
75 // up a video too much, it can't keep up, and rendering stops updating except on | 89 // up a video too much, it can't keep up, and rendering stops updating except on |
76 // the time bar. For really high speeds, audio becomes a bottleneck and we just | 90 // the time bar. For really high speeds, audio becomes a bottleneck and we just |
77 // use up the data we have, which may not achieve the speed requested, but will | 91 // use up the data we have, which may not achieve the speed requested, but will |
78 // not crash the tab. | 92 // not crash the tab. |
79 // | 93 // |
80 // A very slow speed, i.e. 0.00000001x, causes the machine to lock up. (It seems | 94 // A very slow speed, i.e. 0.00000001x, causes the machine to lock up. (It seems |
81 // like a busy loop). It gets unresponsive, although it's not completely dead. | 95 // like a busy loop). It gets unresponsive, although it's not completely dead. |
82 // | 96 // |
83 // Also our timers are not very accurate (especially for ogg), which becomes | 97 // Also our timers are not very accurate (especially for ogg), which becomes |
84 // evident at low speeds and on Vista. Since other speeds are risky and outside | 98 // evident at low speeds and on Vista. Since other speeds are risky and outside |
85 // the norms, we think 1/16x to 16x is a safe and useful range for now. | 99 // the norms, we think 1/16x to 16x is a safe and useful range for now. |
86 const double kMinRate = 0.0625; | 100 const double kMinRate = 0.0625; |
87 const double kMaxRate = 16.0; | 101 const double kMaxRate = 16.0; |
88 | 102 |
89 void SetSinkIdOnMediaThread( | 103 void SetSinkIdOnMediaThread(scoped_refptr<WebAudioSourceProviderImpl> sink, |
90 scoped_refptr<media::WebAudioSourceProviderImpl> sink, | 104 const std::string& device_id, |
91 const std::string& device_id, | 105 const url::Origin& security_origin, |
92 const url::Origin& security_origin, | 106 const SwitchOutputDeviceCB& callback) { |
93 const media::SwitchOutputDeviceCB& callback) { | |
94 if (sink->GetOutputDevice()) { | 107 if (sink->GetOutputDevice()) { |
95 sink->GetOutputDevice()->SwitchOutputDevice(device_id, security_origin, | 108 sink->GetOutputDevice()->SwitchOutputDevice(device_id, security_origin, |
96 callback); | 109 callback); |
97 } else { | 110 } else { |
98 callback.Run(media::OUTPUT_DEVICE_STATUS_ERROR_INTERNAL); | 111 callback.Run(OUTPUT_DEVICE_STATUS_ERROR_INTERNAL); |
99 } | 112 } |
100 } | 113 } |
101 | 114 |
115 #if defined(WIMPI_CAST) | |
116 // This function is file-static so that it can run even after WMPA is deleted. | 
117 void OnReleaseTexture(const WebMediaPlayerParams::Context3DCB& context_3d_cb, | |
118 GLuint texture_id, | |
119 const gpu::SyncToken& sync_token) { | |
120 Context3D context_3d; | |
121 if (!context_3d_cb.is_null()) | |
122 context_3d = context_3d_cb.Run(); | |
123 // GPU Process crashed. | |
124 if (!context_3d.gl) | |
125 return; | |
126 | |
127 GLES2Interface* gl = context_3d.gl; | |
128 gl->WaitSyncTokenCHROMIUM(sync_token.GetConstData()); | |
129 gl->DeleteTextures(1, &texture_id); | |
130 // Flush to ensure that the stream texture gets deleted in a timely fashion. | |
131 gl->ShallowFlushCHROMIUM(); | |
132 } | |
133 #endif // WIMPI_CAST | |
134 | |
102 } // namespace | 135 } // namespace |
103 | 136 |
104 namespace media { | |
105 | |
106 class BufferedDataSourceHostImpl; | 137 class BufferedDataSourceHostImpl; |
107 | 138 |
108 #define STATIC_ASSERT_MATCHING_ENUM(name, name2) \ | 139 #define STATIC_ASSERT_MATCHING_ENUM(name, name2) \ |
109 static_assert(static_cast<int>(WebMediaPlayer::CORSMode##name) == \ | 140 static_assert(static_cast<int>(WebMediaPlayer::CORSMode##name) == \ |
110 static_cast<int>(UrlData::name2), \ | 141 static_cast<int>(UrlData::name2), \ |
111 "mismatching enum values: " #name) | 142 "mismatching enum values: " #name) |
112 STATIC_ASSERT_MATCHING_ENUM(Unspecified, CORS_UNSPECIFIED); | 143 STATIC_ASSERT_MATCHING_ENUM(Unspecified, CORS_UNSPECIFIED); |
113 STATIC_ASSERT_MATCHING_ENUM(Anonymous, CORS_ANONYMOUS); | 144 STATIC_ASSERT_MATCHING_ENUM(Anonymous, CORS_ANONYMOUS); |
114 STATIC_ASSERT_MATCHING_ENUM(UseCredentials, CORS_USE_CREDENTIALS); | 145 STATIC_ASSERT_MATCHING_ENUM(UseCredentials, CORS_USE_CREDENTIALS); |
115 #undef STATIC_ASSERT_MATCHING_ENUM | 146 #undef STATIC_ASSERT_MATCHING_ENUM |
(...skipping 84 matching lines...) | |
200 // e.g. GetCurrentFrameFromCompositor(). See http://crbug.com/434861 | 231 // e.g. GetCurrentFrameFromCompositor(). See http://crbug.com/434861 |
201 | 232 |
202 // Use the null sink if no sink was provided. | 233 // Use the null sink if no sink was provided. |
203 audio_source_provider_ = new WebAudioSourceProviderImpl( | 234 audio_source_provider_ = new WebAudioSourceProviderImpl( |
204 params.audio_renderer_sink().get() | 235 params.audio_renderer_sink().get() |
205 ? params.audio_renderer_sink() | 236 ? params.audio_renderer_sink() |
206 : new NullAudioSink(media_task_runner_)); | 237 : new NullAudioSink(media_task_runner_)); |
207 } | 238 } |
208 | 239 |
209 WebMediaPlayerImpl::~WebMediaPlayerImpl() { | 240 WebMediaPlayerImpl::~WebMediaPlayerImpl() { |
241 #if defined(WIMPI_CAST) | |
242 if (player_manager_) { | |
243 if (is_player_initialized_) | |
244 player_manager_->DestroyPlayer(player_id_); | |
245 | |
246 player_manager_->UnregisterMediaPlayer(player_id_); | |
247 } | |
248 #endif | |
249 | |
210 client_->setWebLayer(NULL); | 250 client_->setWebLayer(NULL); |
211 | 251 |
212 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 252 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
213 | 253 |
214 if (delegate_) { | 254 if (delegate_) { |
215 delegate_->RemoveObserver(this); | 255 delegate_->RemoveObserver(this); |
216 delegate_->PlayerGone(this); | 256 delegate_->PlayerGone(this); |
217 } | 257 } |
218 | 258 |
219 // Abort any pending IO so stopping the pipeline doesn't get blocked. | 259 // Abort any pending IO so stopping the pipeline doesn't get blocked. |
(...skipping 73 matching lines...) | |
293 data_source_.reset(new BufferedDataSource( | 333 data_source_.reset(new BufferedDataSource( |
294 url, static_cast<BufferedResourceLoader::CORSMode>(cors_mode), | 334 url, static_cast<BufferedResourceLoader::CORSMode>(cors_mode), |
295 main_task_runner_, frame_, media_log_.get(), | 335 main_task_runner_, frame_, media_log_.get(), |
296 &buffered_data_source_host_, | 336 &buffered_data_source_host_, |
297 base::Bind(&WebMediaPlayerImpl::NotifyDownloading, AsWeakPtr()))); | 337 base::Bind(&WebMediaPlayerImpl::NotifyDownloading, AsWeakPtr()))); |
298 } | 338 } |
299 data_source_->SetPreload(preload_); | 339 data_source_->SetPreload(preload_); |
300 data_source_->SetBufferingStrategy(buffering_strategy_); | 340 data_source_->SetBufferingStrategy(buffering_strategy_); |
301 data_source_->Initialize( | 341 data_source_->Initialize( |
302 base::Bind(&WebMediaPlayerImpl::DataSourceInitialized, AsWeakPtr())); | 342 base::Bind(&WebMediaPlayerImpl::DataSourceInitialized, AsWeakPtr())); |
343 | |
344 #if defined(WIMPI_CAST) | |
345 player_manager_->Initialize(MEDIA_PLAYER_TYPE_URL, player_id_, url, | |
346 frame_->document().firstPartyForCookies(), 0, | |
347 frame_->document().url(), true); | |
348 | |
349 is_player_initialized_ = true; | |
350 #endif // defined(WIMPI_CAST) | |
303 } | 351 } |
304 | 352 |
305 void WebMediaPlayerImpl::play() { | 353 void WebMediaPlayerImpl::play() { |
306 DVLOG(1) << __FUNCTION__; | 354 DVLOG(1) << __FUNCTION__; |
307 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 355 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
308 | 356 |
357 #if defined(WIMPI_CAST) | |
358 if (is_remote_ && paused_) { | |
359 player_manager_->Start(player_id_); | |
360 remote_time_at_ = base::TimeTicks::Now(); | |
361 paused_ = false; | |
362 return; | |
363 } | |
364 #endif | |
365 | |
309 paused_ = false; | 366 paused_ = false; |
367 | |
310 pipeline_.SetPlaybackRate(playback_rate_); | 368 pipeline_.SetPlaybackRate(playback_rate_); |
311 if (data_source_) | 369 if (data_source_) |
312 data_source_->MediaIsPlaying(); | 370 data_source_->MediaIsPlaying(); |
313 | 371 |
314 media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PLAY)); | 372 media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PLAY)); |
315 | 373 |
316 if (delegate_ && playback_rate_ > 0) | 374 if (delegate_ && playback_rate_ > 0) |
317 NotifyPlaybackStarted(); | 375 NotifyPlaybackStarted(); |
318 } | 376 } |
319 | 377 |
320 void WebMediaPlayerImpl::pause() { | 378 void WebMediaPlayerImpl::pause() { |
321 DVLOG(1) << __FUNCTION__; | 379 DVLOG(1) << __FUNCTION__; |
322 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 380 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
323 | 381 |
324 const bool was_already_paused = paused_ || playback_rate_ == 0; | 382 const bool was_already_paused = paused_ || playback_rate_ == 0; |
325 paused_ = true; | 383 paused_ = true; |
384 | |
385 #if defined(WIMPI_CAST) | |
386 if (is_remote_) { | |
387 player_manager_->Pause(player_id_, true); | |
388 return; | |
389 } | |
390 #endif | |
391 | |
326 pipeline_.SetPlaybackRate(0.0); | 392 pipeline_.SetPlaybackRate(0.0); |
327 UpdatePausedTime(); | 393 UpdatePausedTime(); |
328 | 394 |
329 media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PAUSE)); | 395 media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PAUSE)); |
330 | 396 |
331 if (!was_already_paused && delegate_) | 397 if (!was_already_paused && delegate_) |
332 NotifyPlaybackPaused(); | 398 NotifyPlaybackPaused(); |
333 } | 399 } |
334 | 400 |
335 bool WebMediaPlayerImpl::supportsSave() const { | 401 bool WebMediaPlayerImpl::supportsSave() const { |
336 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 402 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
337 return supports_save_; | 403 return supports_save_; |
338 } | 404 } |
339 | 405 |
340 void WebMediaPlayerImpl::seek(double seconds) { | 406 void WebMediaPlayerImpl::seek(double seconds) { |
341 DVLOG(1) << __FUNCTION__ << "(" << seconds << "s)"; | 407 DVLOG(1) << __FUNCTION__ << "(" << seconds << "s)"; |
342 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 408 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
343 | 409 |
344 ended_ = false; | 410 ended_ = false; |
345 | 411 |
412 base::TimeDelta new_seek_time = base::TimeDelta::FromSecondsD(seconds); | |
413 | |
414 #if defined(WIMPI_CAST) | |
415 if (is_remote_) { | |
416 should_notify_time_changed_ = true; | |
417 player_manager_->Seek(player_id_, new_seek_time); | |
418 return; | |
419 } | |
420 #endif | |
421 | |
346 ReadyState old_state = ready_state_; | 422 ReadyState old_state = ready_state_; |
347 if (ready_state_ > WebMediaPlayer::ReadyStateHaveMetadata) | 423 if (ready_state_ > WebMediaPlayer::ReadyStateHaveMetadata) |
348 SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata); | 424 SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata); |
349 | 425 |
350 base::TimeDelta new_seek_time = base::TimeDelta::FromSecondsD(seconds); | |
351 | |
352 if (seeking_ || suspended_) { | 426 if (seeking_ || suspended_) { |
353 // Once resuming, it's too late to change the resume time and so the | 427 // Once resuming, it's too late to change the resume time and so the |
354 // implementation is a little different. | 428 // implementation is a little different. |
355 bool is_suspended = suspended_ && !resuming_; | 429 bool is_suspended = suspended_ && !resuming_; |
356 | 430 |
357 // If we are currently seeking or resuming to |new_seek_time|, skip the | 431 // If we are currently seeking or resuming to |new_seek_time|, skip the |
358 // seek (except for MSE, which always seeks). | 432 // seek (except for MSE, which always seeks). |
359 if (!is_suspended && new_seek_time == seek_time_) { | 433 if (!is_suspended && new_seek_time == seek_time_) { |
360 if (chunk_demuxer_) { | 434 if (chunk_demuxer_) { |
361 // Don't suppress any redundant in-progress MSE seek. There could have | 435 // Don't suppress any redundant in-progress MSE seek. There could have |
(...skipping 53 matching lines...) | |
415 return; | 489 return; |
416 } | 490 } |
417 } | 491 } |
418 | 492 |
419 seeking_ = true; | 493 seeking_ = true; |
420 seek_time_ = new_seek_time; | 494 seek_time_ = new_seek_time; |
421 | 495 |
422 if (chunk_demuxer_) | 496 if (chunk_demuxer_) |
423 chunk_demuxer_->StartWaitingForSeek(seek_time_); | 497 chunk_demuxer_->StartWaitingForSeek(seek_time_); |
424 | 498 |
425 // Kick off the asynchronous seek! | |
426 pipeline_.Seek(seek_time_, BIND_TO_RENDER_LOOP1( | 499 pipeline_.Seek(seek_time_, BIND_TO_RENDER_LOOP1( |
427 &WebMediaPlayerImpl::OnPipelineSeeked, true)); | 500 &WebMediaPlayerImpl::OnPipelineSeeked, true)); |
428 } | 501 } |
429 | 502 |
430 void WebMediaPlayerImpl::setRate(double rate) { | 503 void WebMediaPlayerImpl::setRate(double rate) { |
431 DVLOG(1) << __FUNCTION__ << "(" << rate << ")"; | 504 DVLOG(1) << __FUNCTION__ << "(" << rate << ")"; |
432 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 505 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
433 | 506 |
434 // TODO(kylep): Remove when support for negatives is added. Also, modify the | 507 // TODO(kylep): Remove when support for negatives is added. Also, modify the |
435 // following checks so rewind uses reasonable values also. | 508 // following checks so rewind uses reasonable values also. |
(...skipping 96 matching lines...) | |
532 | 605 |
533 blink::WebSize WebMediaPlayerImpl::naturalSize() const { | 606 blink::WebSize WebMediaPlayerImpl::naturalSize() const { |
534 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 607 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
535 | 608 |
536 return blink::WebSize(pipeline_metadata_.natural_size); | 609 return blink::WebSize(pipeline_metadata_.natural_size); |
537 } | 610 } |
538 | 611 |
539 bool WebMediaPlayerImpl::paused() const { | 612 bool WebMediaPlayerImpl::paused() const { |
540 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 613 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
541 | 614 |
615 #if defined(WIMPI_CAST) | |
616 if (is_remote_) | |
617 return paused_; | |
618 #endif | |
542 return pipeline_.GetPlaybackRate() == 0.0f; | 619 return pipeline_.GetPlaybackRate() == 0.0f; |
543 } | 620 } |
544 | 621 |
545 bool WebMediaPlayerImpl::seeking() const { | 622 bool WebMediaPlayerImpl::seeking() const { |
546 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 623 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
547 | 624 |
548 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) | 625 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) |
549 return false; | 626 return false; |
550 | 627 |
551 return seeking_; | 628 return seeking_; |
(...skipping 27 matching lines...) | |
579 return duration(); | 656 return duration(); |
580 | 657 |
581 // We know the current seek time better than the pipeline: it may be processing | 658 // We know the current seek time better than the pipeline: it may be processing |
582 // an earlier seek before a pending seek has been started, or it might not yet | 659 // an earlier seek before a pending seek has been started, or it might not yet |
583 // have the current seek time returnable via GetMediaTime(). | 660 // have the current seek time returnable via GetMediaTime(). |
584 if (seeking()) { | 661 if (seeking()) { |
585 return pending_seek_ ? pending_seek_time_.InSecondsF() | 662 return pending_seek_ ? pending_seek_time_.InSecondsF() |
586 : seek_time_.InSecondsF(); | 663 : seek_time_.InSecondsF(); |
587 } | 664 } |
588 | 665 |
589 return (paused_ ? paused_time_ : pipeline_.GetMediaTime()).InSecondsF(); | 666 #if defined(WIMPI_CAST) |
667 if (is_remote_) { | |
668 base::TimeDelta ret = remote_time_; | |
669 if (!paused_) { | |
670 ret += base::TimeTicks::Now() - remote_time_at_; | |
671 } | |
672 return ret.InSecondsF(); | |
673 } | |
674 #endif | |
675 | |
676 if (paused_) { | |
677 return paused_time_.InSecondsF(); | |
678 } | |
679 | |
680 return pipeline_.GetMediaTime().InSecondsF(); | |
590 } | 681 } |
591 | 682 |
592 WebMediaPlayer::NetworkState WebMediaPlayerImpl::networkState() const { | 683 WebMediaPlayer::NetworkState WebMediaPlayerImpl::networkState() const { |
593 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 684 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
594 return network_state_; | 685 return network_state_; |
595 } | 686 } |
596 | 687 |
597 WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const { | 688 WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const { |
598 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 689 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
599 return ready_state_; | 690 return ready_state_; |
(...skipping 441 matching lines...) | |
1041 #endif // !defined(OS_ANDROID) | 1132 #endif // !defined(OS_ANDROID) |
1042 | 1133 |
1043 if (base::CommandLine::ForCurrentProcess()->HasSwitch( | 1134 if (base::CommandLine::ForCurrentProcess()->HasSwitch( |
1044 switches::kDisableMediaSuspend)) { | 1135 switches::kDisableMediaSuspend)) { |
1045 return; | 1136 return; |
1046 } | 1137 } |
1047 | 1138 |
1048 if (!pipeline_.IsRunning()) | 1139 if (!pipeline_.IsRunning()) |
1049 return; | 1140 return; |
1050 | 1141 |
1142 if (suspended_ || suspending_) | |
1143 return; | |
1144 | |
1051 if (resuming_ || seeking_) { | 1145 if (resuming_ || seeking_) { |
1052 pending_suspend_ = true; | 1146 pending_suspend_ = true; |
1053 return; | 1147 return; |
1054 } | 1148 } |
1055 | 1149 |
1056 if (pending_resume_) { | 1150 if (pending_resume_) { |
1057 pending_resume_ = false; | 1151 pending_resume_ = false; |
1058 return; | 1152 return; |
1059 } | 1153 } |
1060 | 1154 |
(...skipping 33 matching lines...) | |
1094 return; | 1188 return; |
1095 } | 1189 } |
1096 | 1190 |
1097 if (pending_suspend_) { | 1191 if (pending_suspend_) { |
1098 pending_suspend_ = false; | 1192 pending_suspend_ = false; |
1099 return; | 1193 return; |
1100 } | 1194 } |
1101 | 1195 |
1102 // We may not be suspended if we were not yet subscribed or the pipeline was | 1196 // We may not be suspended if we were not yet subscribed or the pipeline was |
1103 // not yet started when OnHidden() fired. | 1197 // not yet started when OnHidden() fired. |
1104 if (!suspended_) | 1198 if (!suspended_ || resuming_) |
1105 return; | 1199 return; |
1106 | 1200 |
1107 Resume(); | 1201 Resume(); |
1108 } | 1202 } |
1109 | 1203 |
1110 void WebMediaPlayerImpl::Resume() { | 1204 void WebMediaPlayerImpl::Resume() { |
1111 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 1205 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
1112 CHECK(suspended_); | 1206 CHECK(suspended_); |
1113 CHECK(!resuming_); | 1207 CHECK(!resuming_); |
1114 | 1208 |
(...skipping 18 matching lines...) | |
1133 | 1227 |
1134 if (chunk_demuxer_) | 1228 if (chunk_demuxer_) |
1135 chunk_demuxer_->StartWaitingForSeek(seek_time_); | 1229 chunk_demuxer_->StartWaitingForSeek(seek_time_); |
1136 | 1230 |
1137 resuming_ = true; | 1231 resuming_ = true; |
1138 pipeline_.Resume(CreateRenderer(), seek_time_, | 1232 pipeline_.Resume(CreateRenderer(), seek_time_, |
1139 BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, | 1233 BIND_TO_RENDER_LOOP1(&WebMediaPlayerImpl::OnPipelineSeeked, |
1140 time_changed)); | 1234 time_changed)); |
1141 } | 1235 } |
1142 | 1236 |
1237 #if defined(WIMPI_CAST) | |
1238 void WebMediaPlayerImpl::set_media_player_manager( | |
1239 RendererMediaPlayerManagerInterface* media_player_manager) { | |
1240 player_manager_ = media_player_manager; | |
1241 player_id_ = player_manager_->RegisterMediaPlayer(this); | |
1242 } | |
1243 | |
1244 void WebMediaPlayerImpl::requestRemotePlayback() { | |
1245 player_manager_->Seek(player_id_, | |
1246 base::TimeDelta::FromSecondsD(currentTime())); | |
1247 player_manager_->RequestRemotePlayback(player_id_); | |
1248 } | |
1249 | |
1250 void WebMediaPlayerImpl::requestRemotePlaybackControl() { | |
1251 player_manager_->RequestRemotePlaybackControl(player_id_); | |
1252 } | |
1253 | |
1254 // RendererMediaPlayerInterface implementation | |
1255 void WebMediaPlayerImpl::OnMediaMetadataChanged(base::TimeDelta duration, | |
1256 int width, | |
1257 int height, | |
1258 bool success) {} | |
1259 void WebMediaPlayerImpl::OnPlaybackComplete() { | |
1260 DVLOG(1) << __FUNCTION__; | |
1261 ended_ = true; | |
1262 client_->timeChanged(); | |
1263 } | |
1264 void WebMediaPlayerImpl::OnBufferingUpdate(int percentage) { | |
1265 DVLOG(1) << __FUNCTION__; | |
1266 } | |
1267 void WebMediaPlayerImpl::OnSeekRequest(const base::TimeDelta& time_to_seek) { | |
1268 DVLOG(1) << __FUNCTION__; | |
1269 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
1270 // DCHECK(main_thread_checker_.CalledOnValidThread()); | |
1271 client_->requestSeek(time_to_seek.InSecondsF()); | |
1272 } | |
1273 void WebMediaPlayerImpl::OnSeekComplete(const base::TimeDelta& current_time) { | |
1274 DVLOG(1) << __FUNCTION__; | |
1275 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
1276 remote_time_at_ = base::TimeTicks::Now(); | |
1277 remote_time_ = current_time; | |
1278 OnPipelineSeeked(true, PIPELINE_OK); | |
1279 } | |
1280 | |
1281 void WebMediaPlayerImpl::OnMediaError(int error_type) { | |
1282 DVLOG(1) << __FUNCTION__; | |
1283 } | |
1284 void WebMediaPlayerImpl::OnVideoSizeChanged(int width, int height) { | |
1285 DVLOG(1) << __FUNCTION__; | |
1286 } | |
1287 | |
1288 void WebMediaPlayerImpl::OnTimeUpdate(base::TimeDelta current_timestamp, | |
1289 base::TimeTicks current_time_ticks) { | |
1290 DVLOG(1) << __FUNCTION__ << " " << current_timestamp.InSecondsF(); | |
1291 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
1292 remote_time_at_ = current_time_ticks; | |
1293 remote_time_ = current_timestamp; | |
1294 } | |
1295 | |
1296 void WebMediaPlayerImpl::OnPlayerReleased() { | |
1297 DVLOG(1) << __FUNCTION__; | |
1298 } | |
1299 | |
1300 void WebMediaPlayerImpl::OnConnectedToRemoteDevice( | |
1301 const std::string& remote_playback_message) { | |
1302 DVLOG(1) << __FUNCTION__; | |
1303 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
1304 remote_time_ = base::TimeDelta::FromSecondsD(currentTime()); | |
1305 // Set paused so that progress bar doesn't advance while remote playback | |
1306 // is starting. | |
1307 pause(); | |
1308 is_remote_ = true; | |
1309 DrawRemotePlaybackText(remote_playback_message); | |
1310 client_->connectedToRemoteDevice(); | |
1311 } | |
1312 | |
1313 void WebMediaPlayerImpl::OnDisconnectedFromRemoteDevice() { | |
1314 DVLOG(1) << __FUNCTION__; | |
1315 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
1316 paused_time_ = base::TimeDelta::FromSecondsD(currentTime()); | |
1317 is_remote_ = false; | |
1318 if (suspended_ && !resuming_) | |
1319 Resume(); | |
1320 if (ended_ || | |
1321 paused_time_ + base::TimeDelta::FromMilliseconds( | |
1322 media::kTimeUpdateInterval * 2) > | |
1323 pipeline_.GetMediaDuration()) { | |
1324 paused_time_ = pipeline_.GetMediaDuration(); | |
1325 } | |
1326 | |
1327 // We already told the delegate we're paused when remoting started. | |
1328 pipeline_.SetPlaybackRate(0.0); | |
1329 client_->disconnectedFromRemoteDevice(); | |
1330 if (!paused_) { | |
1331 paused_ = true; | |
1332 client_->playbackStateChanged(); | |
1333 } | |
1334 seek(paused_time_.InSecondsF()); | |
1335 // Seek resets ended_, which is not really what we want. | |
1336 if (paused_time_ == pipeline_.GetMediaDuration()) { | |
1337 ended_ = true; | |
1338 } | |
1339 } | |
1340 | |
1341 void WebMediaPlayerImpl::OnDidExitFullscreen() { | |
1342 DVLOG(1) << __FUNCTION__; | |
1343 } | |
1344 void WebMediaPlayerImpl::OnMediaPlayerPlay() { | |
1345 DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_; | |
1346 if (is_remote_ && paused_) { | |
1347 paused_ = false; | |
1348 remote_time_at_ = base::TimeTicks::Now(); | |
1349 client_->playbackStateChanged(); | |
1350 } | |
1351 // Blink expects a timeChanged() in response to a seek(). | |
1352 if (should_notify_time_changed_) | |
1353 client_->timeChanged(); | |
1354 } | |
1355 void WebMediaPlayerImpl::OnMediaPlayerPause() { | |
1356 DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_; | |
1357 if (is_remote_ && !paused_) { | |
1358 paused_ = true; | |
1359 client_->playbackStateChanged(); | |
1360 } | |
1361 } | |
1362 void WebMediaPlayerImpl::OnRemoteRouteAvailabilityChanged( | |
1363 bool routes_available) { | |
1364 DVLOG(1) << __FUNCTION__; | |
1365 client_->remoteRouteAvailabilityChanged(routes_available); | |
1366 } | |
1367 | |
1368 void WebMediaPlayerImpl::ReleaseMediaResources() {} | |
1369 | |
1370 #if defined(VIDEO_HOLE) | |
1371 bool WebMediaPlayerImpl::UpdateBoundaryRectangle() { | |
1372 return false; | |
1373 } | |
1374 const gfx::RectF WebMediaPlayerImpl::GetBoundaryRectangle() { | |
1375 return gfx::RectF(); | |
1376 } | |
1377 #endif // defined(VIDEO_HOLE) | |
1378 | |
1379 void WebMediaPlayerImpl::DrawRemotePlaybackText( | |
DaleCurtis, 2016/01/07 21:28:39:
Can you just abstract the existing code in WebMedi
hubbe, 2016/01/11 22:47:24:
I think it can, but it will not make it easier to
| |
1380 const std::string& remote_playback_message) { | |
1381 DVLOG(1) << __FUNCTION__; | |
1382 DCHECK(main_task_runner_->BelongsToCurrentThread()); | |
1383 // DCHECK(main_thread_checker_.CalledOnValidThread()); | |
1384 if (!video_weblayer_) | |
1385 return; | |
1386 | |
1387 // TODO(johnme): Should redraw this frame if the layer bounds change; but | |
1388 // there seems no easy way to listen for the layer resizing (as opposed to | |
1389 // OnVideoSizeChanged, which is when the frame sizes of the video file | |
1390 // change). Perhaps have to poll (on main thread of course)? | |
1391 gfx::Size video_size_css_px = video_weblayer_->bounds(); | |
1392 float device_scale_factor = frame_->view()->deviceScaleFactor(); | |
1393 // canvas_size will be the size in device pixels when pageScaleFactor == 1 | |
1394 gfx::Size canvas_size( | |
1395 static_cast<int>(video_size_css_px.width() * device_scale_factor), | |
1396 static_cast<int>(video_size_css_px.height() * device_scale_factor)); | |
1397 | |
1398 SkBitmap bitmap; | |
1399 bitmap.allocN32Pixels(canvas_size.width(), canvas_size.height()); | |
1400 | |
1401 // Create the canvas and draw the "Casting to <Chromecast>" text on it. | |
1402 SkCanvas canvas(bitmap); | |
1403 canvas.drawColor(SK_ColorBLACK); | |
1404 | |
1405 const SkScalar kTextSize(40); | |
1406 const SkScalar kMinPadding(40); | |
1407 | |
1408 SkPaint paint; | |
1409 paint.setAntiAlias(true); | |
1410 paint.setFilterQuality(kHigh_SkFilterQuality); | |
1411 paint.setColor(SK_ColorWHITE); | |
1412 paint.setTypeface(SkTypeface::CreateFromName("sans", SkTypeface::kBold)); | |
1413 paint.setTextSize(kTextSize); | |
1414 | |
1415 // Calculate the vertical margin from the top | |
1416 SkPaint::FontMetrics font_metrics; | |
1417 paint.getFontMetrics(&font_metrics); | |
1418 SkScalar sk_vertical_margin = kMinPadding - font_metrics.fAscent; | |
1419 | |
1420 // Measure the width of the entire text to display | |
1421 size_t display_text_width = paint.measureText(remote_playback_message.c_str(), | |
1422 remote_playback_message.size()); | |
1423 std::string display_text(remote_playback_message); | |
1424 | |
1425 if (display_text_width + (kMinPadding * 2) > canvas_size.width()) { | |
1426 // The text is too long to fit in one line, truncate it and append ellipsis | |
1427 // to the end. | |
1428 | |
1429 // First, figure out how much of the canvas the '...' will take up. | |
1430 const std::string kTruncationEllipsis("\xE2\x80\xA6"); | |
1431 SkScalar sk_ellipse_width = paint.measureText(kTruncationEllipsis.c_str(), | |
1432 kTruncationEllipsis.size()); | |
1433 | |
1434 // Then calculate how much of the text can be drawn with the '...' appended | |
1435 // to the end of the string. | |
1436 SkScalar sk_max_original_text_width(canvas_size.width() - | |
1437 (kMinPadding * 2) - sk_ellipse_width); | |
1438 size_t sk_max_original_text_length = paint.breakText( | |
1439 remote_playback_message.c_str(), remote_playback_message.size(), | |
1440 sk_max_original_text_width); | |
1441 | |
1442 // Remove the part of the string that doesn't fit and append '...'. | |
1443 display_text.erase( | |
1444 sk_max_original_text_length, | |
1445 remote_playback_message.size() - sk_max_original_text_length); | |
1446 display_text.append(kTruncationEllipsis); | |
1447 display_text_width = | |
1448 paint.measureText(display_text.c_str(), display_text.size()); | |
1449 } | |
1450 | |
1451 // Center the text horizontally. | |
1452 SkScalar sk_horizontal_margin = | |
1453 (canvas_size.width() - display_text_width) / 2.0; | |
1454 canvas.drawText(display_text.c_str(), display_text.size(), | |
1455 sk_horizontal_margin, sk_vertical_margin, paint); | |
1456 | |
1457 Context3D context_3d; | |
1458 if (!context_3d_cb_.is_null()) | |
1459 context_3d = context_3d_cb_.Run(); | |
1460 // GPU Process crashed. | |
1461 if (!context_3d.gl) | |
1462 return; | |
1463 | |
1464 GLES2Interface* gl = context_3d.gl; | |
1465 GLuint remote_playback_texture_id = 0; | |
1466 gl->GenTextures(1, &remote_playback_texture_id); | |
1467 GLuint texture_target = GL_TEXTURE_2D; | |
1468 gl->BindTexture(texture_target, remote_playback_texture_id); | |
1469 gl->TexParameteri(texture_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | |
1470 gl->TexParameteri(texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR); | |
1471 gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | |
1472 gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | |
1473 | |
1474 { | |
1475 SkAutoLockPixels lock(bitmap); | |
1476 gl->TexImage2D(texture_target, 0 /* level */, GL_RGBA /* internalformat */, | |
1477 bitmap.width(), bitmap.height(), 0 /* border */, | |
1478 GL_RGBA /* format */, GL_UNSIGNED_BYTE /* type */, | |
1479 bitmap.getPixels()); | |
1480 } | |
1481 | |
1482 gpu::Mailbox texture_mailbox; | |
1483 gl->GenMailboxCHROMIUM(texture_mailbox.name); | |
1484 gl->ProduceTextureCHROMIUM(texture_target, texture_mailbox.name); | |
1485 gl->Flush(); | |
1486 gpu::SyncToken texture_mailbox_sync_token(gl->InsertSyncPointCHROMIUM()); | |
1487 | |
1488 scoped_refptr<VideoFrame> new_frame = VideoFrame::WrapNativeTexture( | |
1489 media::PIXEL_FORMAT_ARGB, | |
1490 gpu::MailboxHolder(texture_mailbox, texture_mailbox_sync_token, | |
1491 texture_target), | |
1492 media::BindToCurrentLoop(base::Bind(&OnReleaseTexture, context_3d_cb_, | |
1493 remote_playback_texture_id)), | |
1494 canvas_size /* coded_size */, gfx::Rect(canvas_size) /* visible_rect */, | |
1495 canvas_size /* natural_size */, base::TimeDelta() /* timestamp */); | |
1496 | |
1497 if (!suspended_ && !suspending_) | |
1498 Suspend(); | |
1499 compositor_->PaintFrameUsingOldRenderingPath(new_frame); | |
1500 } | |
1501 | |
1502 #endif // defined(WIMPI_CAST) | |
1503 | |
1143 void WebMediaPlayerImpl::DataSourceInitialized(bool success) { | 1504 void WebMediaPlayerImpl::DataSourceInitialized(bool success) { |
1144 DVLOG(1) << __FUNCTION__; | 1505 DVLOG(1) << __FUNCTION__; |
1145 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 1506 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
1146 | 1507 |
1147 if (!success) { | 1508 if (!success) { |
1148 SetNetworkState(WebMediaPlayer::NetworkStateFormatError); | 1509 SetNetworkState(WebMediaPlayer::NetworkStateFormatError); |
1149 return; | 1510 return; |
1150 } | 1511 } |
1151 | 1512 |
1152 StartPipeline(); | 1513 StartPipeline(); |
(...skipping 161 matching lines...) | |
1314 | 1675 |
1315 // pause() may be called after playback has ended and the HTMLMediaElement | 1676 // pause() may be called after playback has ended and the HTMLMediaElement |
1316 // requires that currentTime() == duration() after ending. We want to ensure | 1677 // requires that currentTime() == duration() after ending. We want to ensure |
1317 // |paused_time_| matches currentTime() in this case or a future seek() may | 1678 // |paused_time_| matches currentTime() in this case or a future seek() may |
1318 // incorrectly discard what it thinks is a seek to the existing time. | 1679 // incorrectly discard what it thinks is a seek to the existing time. |
1319 paused_time_ = | 1680 paused_time_ = |
1320 ended_ ? pipeline_.GetMediaDuration() : pipeline_.GetMediaTime(); | 1681 ended_ ? pipeline_.GetMediaDuration() : pipeline_.GetMediaTime(); |
1321 } | 1682 } |
1322 | 1683 |
1323 void WebMediaPlayerImpl::NotifyPlaybackStarted() { | 1684 void WebMediaPlayerImpl::NotifyPlaybackStarted() { |
1685 #if defined(WIMPI_CAST) | |
1686 // We do not tell our delegates about remote playback, because that would | 
1687 // keep the device awake, which is not what we want. | |
1688 if (!is_remote_) | |
liberato (no reviews please), 2016/01/08 16:37:58:
the comment makes me expect that it should check f
hubbe, 2016/01/11 22:47:24:
Good catch, fixed.
(Testing...)
| |
1689 return; | |
1690 #endif | |
1324 if (delegate_) | 1691 if (delegate_) |
1325 delegate_->DidPlay(this); | 1692 delegate_->DidPlay(this); |
1326 if (!memory_usage_reporting_timer_.IsRunning()) { | 1693 if (!memory_usage_reporting_timer_.IsRunning()) { |
1327 memory_usage_reporting_timer_.Start(FROM_HERE, | 1694 memory_usage_reporting_timer_.Start(FROM_HERE, |
1328 base::TimeDelta::FromSeconds(2), this, | 1695 base::TimeDelta::FromSeconds(2), this, |
1329 &WebMediaPlayerImpl::ReportMemoryUsage); | 1696 &WebMediaPlayerImpl::ReportMemoryUsage); |
1330 } | 1697 } |
1331 } | 1698 } |
1332 | 1699 |
1333 void WebMediaPlayerImpl::NotifyPlaybackPaused() { | 1700 void WebMediaPlayerImpl::NotifyPlaybackPaused() { |
1701 #if defined(WIMPI_CAST) | |
1702 if (!is_remote_) | |
1703 return; | |
1704 #endif | |
1334 if (delegate_) | 1705 if (delegate_) |
1335 delegate_->DidPause(this); | 1706 delegate_->DidPause(this); |
1336 memory_usage_reporting_timer_.Stop(); | 1707 memory_usage_reporting_timer_.Stop(); |
1337 ReportMemoryUsage(); | 1708 ReportMemoryUsage(); |
1338 } | 1709 } |
1339 | 1710 |
1340 void WebMediaPlayerImpl::ReportMemoryUsage() { | 1711 void WebMediaPlayerImpl::ReportMemoryUsage() { |
1341 DCHECK(main_task_runner_->BelongsToCurrentThread()); | 1712 DCHECK(main_task_runner_->BelongsToCurrentThread()); |
1342 | 1713 |
1343 // About base::Unretained() usage below: We destroy |demuxer_| on the main | 1714 // About base::Unretained() usage below: We destroy |demuxer_| on the main |
(...skipping 24 matching lines...) | |
1368 << ", Video: " << stats.video_memory_usage << ", DataSource: " | 1739 << ", Video: " << stats.video_memory_usage << ", DataSource: " |
1369 << (data_source_ ? data_source_->GetMemoryUsage() : 0) | 1740 << (data_source_ ? data_source_->GetMemoryUsage() : 0) |
1370 << ", Demuxer: " << demuxer_memory_usage; | 1741 << ", Demuxer: " << demuxer_memory_usage; |
1371 | 1742 |
1372 const int64_t delta = current_memory_usage - last_reported_memory_usage_; | 1743 const int64_t delta = current_memory_usage - last_reported_memory_usage_; |
1373 last_reported_memory_usage_ = current_memory_usage; | 1744 last_reported_memory_usage_ = current_memory_usage; |
1374 adjust_allocated_memory_cb_.Run(delta); | 1745 adjust_allocated_memory_cb_.Run(delta); |
1375 } | 1746 } |
1376 | 1747 |
1377 } // namespace media | 1748 } // namespace media |
OLD | NEW |