// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/blink/webmediaplayer_cast_android.h"

#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/blink/webgraphicscontext3d_impl.h"
#include "gpu/command_buffer/client/gles2_interface.h"
#include "gpu/command_buffer/common/sync_token.h"
#include "media/base/android/media_common_android.h"
#include "media/base/bind_to_current_loop.h"
#include "media/blink/webmediaplayer_impl.h"
#include "media/blink/webmediaplayer_params.h"
#include "third_party/WebKit/public/platform/WebMediaPlayerClient.h"
#include "third_party/WebKit/public/web/WebDocument.h"
#include "third_party/WebKit/public/web/WebLocalFrame.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "third_party/skia/include/core/SkPaint.h"
#include "third_party/skia/include/core/SkTypeface.h"
#include "third_party/skia/include/gpu/GrContext.h"
#include "third_party/skia/include/gpu/SkGrPixelRef.h"

using gpu::gles2::GLES2Interface;

namespace media {

28 namespace { | |
29 // File-static function is to allow it to run even after WMPA is deleted. | |
liberato (no reviews please)
2016/01/12 22:14:58
/WMPA/WMPI/
hubbe
2016/01/12 22:39:25
Done.
| |
30 void OnReleaseTexture(const WebMediaPlayerParams::Context3DCB& context_3d_cb, | |
31 GLuint texture_id, | |
32 const gpu::SyncToken& sync_token) { | |
33 Context3D context_3d; | |
34 if (!context_3d_cb.is_null()) | |
35 context_3d = context_3d_cb.Run(); | |
36 // GPU Process crashed. | |
37 if (!context_3d.gl) | |
38 return; | |
39 | |
40 GLES2Interface* gl = context_3d.gl; | |
41 gl->WaitSyncTokenCHROMIUM(sync_token.GetConstData()); | |
42 gl->DeleteTextures(1, &texture_id); | |
43 // Flush to ensure that the stream texture gets deleted in a timely fashion. | |
liberato (no reviews please)
2016/01/12 22:14:58
stale comment -- /stream //
hubbe
2016/01/12 22:39:25
Done.
| |
44 gl->ShallowFlushCHROMIUM(); | |
45 } | |
46 | |
47 } // namespace | |
48 | |
49 WebMediaPlayerCast::WebMediaPlayerCast( | |
50 WebMediaPlayerImpl* impl, | |
51 blink::WebMediaPlayerClient* client, | |
52 const WebMediaPlayerParams::Context3DCB& context_3d_cb) | |
53 : webmediaplayer_(impl), client_(client), context_3d_cb_(context_3d_cb) {} | |
54 WebMediaPlayerCast::~WebMediaPlayerCast() { | |
liberato (no reviews please)
2016/01/12 22:14:58
\n
hubbe
2016/01/12 22:39:25
Done.
| |
55 if (player_manager_) { | |
56 if (is_player_initialized_) | |
57 player_manager_->DestroyPlayer(player_id_); | |
58 | |
59 player_manager_->UnregisterMediaPlayer(player_id_); | |
60 } | |
61 } | |
62 | |
63 void WebMediaPlayerCast::Initialize(const GURL& url, | |
64 blink::WebLocalFrame* frame) { | |
65 player_manager_->Initialize(MEDIA_PLAYER_TYPE_URL, player_id_, url, | |
66 frame->document().firstPartyForCookies(), 0, | |
67 frame->document().url(), true); | |
68 is_player_initialized_ = true; | |
69 } | |
70 | |
71 void WebMediaPlayerCast::set_media_player_manager( | |
72 RendererMediaPlayerManagerInterface* media_player_manager) { | |
73 player_manager_ = media_player_manager; | |
74 player_id_ = player_manager_->RegisterMediaPlayer(this); | |
75 } | |
76 | |
77 void WebMediaPlayerCast::requestRemotePlayback() { | |
78 player_manager_->Seek(player_id_, base::TimeDelta::FromSecondsD( | |
79 webmediaplayer_->currentTime())); | |
80 player_manager_->RequestRemotePlayback(player_id_); | |
81 } | |
82 | |
83 void WebMediaPlayerCast::requestRemotePlaybackControl() { | |
84 player_manager_->RequestRemotePlaybackControl(player_id_); | |
85 } | |
86 | |
87 // RendererMediaPlayerInterface implementation | |
88 void WebMediaPlayerCast::OnMediaMetadataChanged(base::TimeDelta duration, | |
89 int width, | |
90 int height, | |
91 bool success) {} | |
92 void WebMediaPlayerCast::OnPlaybackComplete() { | |
93 DVLOG(1) << __FUNCTION__; | |
94 webmediaplayer_->OnRemotePlaybackEnded(); | |
95 } | |
96 void WebMediaPlayerCast::OnBufferingUpdate(int percentage) { | |
97 DVLOG(1) << __FUNCTION__; | |
98 } | |
99 void WebMediaPlayerCast::OnSeekRequest(const base::TimeDelta& time_to_seek) { | |
100 DVLOG(1) << __FUNCTION__; | |
101 // DCHECK(main_thread_checker_.CalledOnValidThread()); | |
102 client_->requestSeek(time_to_seek.InSecondsF()); | |
103 } | |
104 void WebMediaPlayerCast::OnSeekComplete(const base::TimeDelta& current_time) { | |
105 DVLOG(1) << __FUNCTION__; | |
106 remote_time_at_ = base::TimeTicks::Now(); | |
107 remote_time_ = current_time; | |
108 webmediaplayer_->OnPipelineSeeked(true, PIPELINE_OK); | |
109 } | |
110 | |
111 void WebMediaPlayerCast::OnMediaError(int error_type) { | |
112 DVLOG(1) << __FUNCTION__; | |
113 } | |
114 void WebMediaPlayerCast::OnVideoSizeChanged(int width, int height) { | |
115 DVLOG(1) << __FUNCTION__; | |
116 } | |
117 | |
118 void WebMediaPlayerCast::OnTimeUpdate(base::TimeDelta current_timestamp, | |
119 base::TimeTicks current_time_ticks) { | |
120 DVLOG(1) << __FUNCTION__ << " " << current_timestamp.InSecondsF(); | |
121 remote_time_at_ = current_time_ticks; | |
122 remote_time_ = current_timestamp; | |
123 } | |
124 | |
125 void WebMediaPlayerCast::OnPlayerReleased() { | |
126 DVLOG(1) << __FUNCTION__; | |
127 } | |
128 | |
// Called when a cast session is established. Freezes local playback, renders
// the "Casting to <device>" poster frame, and notifies Blink.
void WebMediaPlayerCast::OnConnectedToRemoteDevice(
    const std::string& remote_playback_message) {
  DVLOG(1) << __FUNCTION__;
  // Snapshot the local position so remote time reporting starts from where
  // local playback left off.
  remote_time_ = base::TimeDelta::FromSecondsD(webmediaplayer_->currentTime());
  // Set paused so that progress bar doesn't advance while remote playback
  // is starting.
  webmediaplayer_->pause();
  is_remote_ = true;
  DrawRemotePlaybackText(remote_playback_message);
  client_->connectedToRemoteDevice();
}
140 | |
141 double WebMediaPlayerCast::currentTime() const { | |
142 base::TimeDelta ret = remote_time_; | |
143 if (!paused_) { | |
144 ret += base::TimeTicks::Now() - remote_time_at_; | |
145 } | |
146 return ret.InSecondsF(); | |
147 } | |
148 | |
149 void WebMediaPlayerCast::play() { | |
150 player_manager_->Start(player_id_); | |
151 remote_time_at_ = base::TimeTicks::Now(); | |
152 paused_ = false; | |
153 } | |
154 void WebMediaPlayerCast::pause() { | |
155 player_manager_->Pause(player_id_, true); | |
156 } | |
157 | |
158 void WebMediaPlayerCast::seek(base::TimeDelta t) { | |
159 should_notify_time_changed_ = true; | |
160 player_manager_->Seek(player_id_, t); | |
161 } | |
162 | |
163 void WebMediaPlayerCast::OnDisconnectedFromRemoteDevice() { | |
164 DVLOG(1) << __FUNCTION__; | |
165 is_remote_ = false; | |
166 double t = currentTime(); | |
167 if (t + media::kTimeUpdateInterval * 2 / 1000 > webmediaplayer_->duration()) { | |
168 t = webmediaplayer_->duration(); | |
169 } | |
170 webmediaplayer_->OnDisconnectedFromRemoteDevice(t); | |
171 } | |
172 | |
173 void WebMediaPlayerCast::OnDidExitFullscreen() { | |
174 DVLOG(1) << __FUNCTION__; | |
175 } | |
// Remote device reported a play. On a remote pause->play transition,
// re-anchor the extrapolation clock and notify Blink of the state change.
void WebMediaPlayerCast::OnMediaPlayerPlay() {
  DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_;
  if (is_remote_ && paused_) {
    paused_ = false;
    remote_time_at_ = base::TimeTicks::Now();
    client_->playbackStateChanged();
  }
  // Blink expects a timeChanged() in response to a seek().
  // NOTE(review): should_notify_time_changed_ is never cleared after
  // notifying, so every subsequent play also fires timeChanged() -- confirm
  // whether it should be reset to false here.
  if (should_notify_time_changed_)
    client_->timeChanged();
}
187 void WebMediaPlayerCast::OnMediaPlayerPause() { | |
188 DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_; | |
189 if (is_remote_ && !paused_) { | |
190 paused_ = true; | |
191 client_->playbackStateChanged(); | |
192 } | |
193 } | |
194 void WebMediaPlayerCast::OnRemoteRouteAvailabilityChanged( | |
195 bool routes_available) { | |
196 DVLOG(1) << __FUNCTION__; | |
197 client_->remoteRouteAvailabilityChanged(routes_available); | |
198 } | |
199 | |
200 void WebMediaPlayerCast::ReleaseMediaResources() {} | |
201 void WebMediaPlayerCast::OnWaitingForDecryptionKey() {} | |
202 bool WebMediaPlayerCast::hasVideo() const { | |
203 return true; | |
204 } | |
205 bool WebMediaPlayerCast::paused() const { | |
206 return paused_; | |
207 } | |
208 | |
#if defined(VIDEO_HOLE)
// Video-hole punching is unused while casting: the boundary never changes
// and is reported as empty.
bool WebMediaPlayerCast::UpdateBoundaryRectangle() {
  return false;
}

const gfx::RectF WebMediaPlayerCast::GetBoundaryRectangle() {
  return gfx::RectF();
}
#endif  // defined(VIDEO_HOLE)
217 | |
// Renders |remote_playback_message| (e.g. "Casting to <device>") centered at
// the top of a black, canvas-sized bitmap, uploads it to a GL texture, wraps
// the texture in a VideoFrame via a CHROMIUM mailbox, and hands that frame to
// WMPI to display while remote playback is active.
void WebMediaPlayerCast::DrawRemotePlaybackText(
    const std::string& remote_playback_message) {
  DVLOG(1) << __FUNCTION__;
  // DCHECK(main_thread_checker_.CalledOnValidThread());

  // TODO(johnme): Should redraw this frame if the layer bounds change; but
  // there seems no easy way to listen for the layer resizing (as opposed to
  // OnVideoSizeChanged, which is when the frame sizes of the video file
  // change). Perhaps have to poll (on main thread of course)?
  gfx::Size canvas_size = webmediaplayer_->GetCanvasSize();
  // A zero-width canvas means there is nothing to draw into yet.
  if (!canvas_size.width())
    return;

  SkBitmap bitmap;
  bitmap.allocN32Pixels(canvas_size.width(), canvas_size.height());

  // Create the canvas and draw the "Casting to <Chromecast>" text on it.
  SkCanvas canvas(bitmap);
  canvas.drawColor(SK_ColorBLACK);

  const SkScalar kTextSize(40);
  const SkScalar kMinPadding(40);

  SkPaint paint;
  paint.setAntiAlias(true);
  paint.setFilterQuality(kHigh_SkFilterQuality);
  paint.setColor(SK_ColorWHITE);
  paint.setTypeface(SkTypeface::CreateFromName("sans", SkTypeface::kBold));
  paint.setTextSize(kTextSize);

  // Calculate the vertical margin from the top.
  SkPaint::FontMetrics font_metrics;
  paint.getFontMetrics(&font_metrics);
  // fAscent is negative (distance above the baseline), so subtracting it
  // yields the baseline y-coordinate for text padded kMinPadding from the top.
  SkScalar sk_vertical_margin = kMinPadding - font_metrics.fAscent;

  // Measure the width of the entire text to display.
  // NOTE(review): measureText() returns SkScalar; storing it in a size_t
  // truncates the fractional width. Harmless for centering, but confirm this
  // was intentional.
  size_t display_text_width = paint.measureText(remote_playback_message.c_str(),
                                                remote_playback_message.size());
  std::string display_text(remote_playback_message);

  if (display_text_width + (kMinPadding * 2) > canvas_size.width()) {
    // The text is too long to fit in one line, truncate it and append ellipsis
    // to the end.

    // First, figure out how much of the canvas the '...' will take up.
    const std::string kTruncationEllipsis("\xE2\x80\xA6");
    SkScalar sk_ellipse_width = paint.measureText(kTruncationEllipsis.c_str(),
                                                  kTruncationEllipsis.size());

    // Then calculate how much of the text can be drawn with the '...' appended
    // to the end of the string.
    SkScalar sk_max_original_text_width(canvas_size.width() -
                                        (kMinPadding * 2) - sk_ellipse_width);
    size_t sk_max_original_text_length = paint.breakText(
        remote_playback_message.c_str(), remote_playback_message.size(),
        sk_max_original_text_width);

    // Remove the part of the string that doesn't fit and append '...'.
    display_text.erase(
        sk_max_original_text_length,
        remote_playback_message.size() - sk_max_original_text_length);
    display_text.append(kTruncationEllipsis);
    display_text_width =
        paint.measureText(display_text.c_str(), display_text.size());
  }

  // Center the text horizontally.
  SkScalar sk_horizontal_margin =
      (canvas_size.width() - display_text_width) / 2.0;
  canvas.drawText(display_text.c_str(), display_text.size(),
                  sk_horizontal_margin, sk_vertical_margin, paint);

  Context3D context_3d;
  if (!context_3d_cb_.is_null())
    context_3d = context_3d_cb_.Run();
  // No GL context (e.g. GPU process crashed): skip the poster frame entirely.
  if (!context_3d.gl)
    return;

  // Upload the rendered bitmap into a new 2D texture.
  GLES2Interface* gl = context_3d.gl;
  GLuint remote_playback_texture_id = 0;
  gl->GenTextures(1, &remote_playback_texture_id);
  GLuint texture_target = GL_TEXTURE_2D;
  gl->BindTexture(texture_target, remote_playback_texture_id);
  gl->TexParameteri(texture_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  gl->TexParameteri(texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  {
    // Keep the pixels locked only for the duration of the upload.
    SkAutoLockPixels lock(bitmap);
    gl->TexImage2D(texture_target, 0 /* level */, GL_RGBA /* internalformat */,
                   bitmap.width(), bitmap.height(), 0 /* border */,
                   GL_RGBA /* format */, GL_UNSIGNED_BYTE /* type */,
                   bitmap.getPixels());
  }

  // Publish the texture through a mailbox so the compositor can consume it
  // from its own context; the sync token orders the upload before any reads.
  gpu::Mailbox texture_mailbox;
  gl->GenMailboxCHROMIUM(texture_mailbox.name);
  gl->ProduceTextureCHROMIUM(texture_target, texture_mailbox.name);
  gl->Flush();
  gpu::SyncToken texture_mailbox_sync_token(gl->InsertSyncPointCHROMIUM());

  // Wrap the texture in a VideoFrame. OnReleaseTexture, bound to the current
  // loop with the context callback and texture id, deletes the texture when
  // the frame is destroyed -- even if this object is gone by then.
  scoped_refptr<VideoFrame> new_frame = VideoFrame::WrapNativeTexture(
      media::PIXEL_FORMAT_ARGB,
      gpu::MailboxHolder(texture_mailbox, texture_mailbox_sync_token,
                         texture_target),
      media::BindToCurrentLoop(base::Bind(&OnReleaseTexture, context_3d_cb_,
                                          remote_playback_texture_id)),
      canvas_size /* coded_size */, gfx::Rect(canvas_size) /* visible_rect */,
      canvas_size /* natural_size */, base::TimeDelta() /* timestamp */);

  webmediaplayer_->SuspendForRemote(new_frame);
}
}  // namespace media