Chromium Code Reviews

Unified Diff: content/renderer/media/media_stream_dependency_factory.cc

Issue 13496009: Hookup the MediaStream glue for Adding and Removing tracks to an existing MediaStream. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Add workaround when there are no microphones on bots. Created 7 years, 8 months ago
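This change moves the per-track setup out of CreateNativeLocalMediaStream() and exposes it through two new public helpers, AddNativeMediaStreamTrack() and RemoveNativeMediaStreamTrack(), so that audio and video tracks can be attached to or detached from an already-created native stream. A minimal caller-side sketch follows; the TrackObserver class and its dependency_factory_ member are illustrative assumptions and not part of this patch, only the two factory methods come from the change below.

// Sketch only (assumed caller): forwards WebKit track changes to the glue
// added in this CL. Assumes the usual WebKit MediaStream headers and a
// MediaStreamDependencyFactory* member named dependency_factory_.
void TrackObserver::OnTrackAdded(const WebKit::WebMediaStream& stream,
                                 const WebKit::WebMediaStreamTrack& track) {
  // Creates the matching libjingle track and adds it to the native stream
  // stored in the stream's MediaStreamExtraData.
  if (!dependency_factory_->AddNativeMediaStreamTrack(stream, track))
    DVLOG(1) << "Could not add native track " << UTF16ToUTF8(track.id());
}

void TrackObserver::OnTrackRemoved(const WebKit::WebMediaStream& stream,
                                   const WebKit::WebMediaStreamTrack& track) {
  // Looks up the native track by id and removes it from the native stream.
  dependency_factory_->RemoveNativeMediaStreamTrack(stream, track);
}

AddNativeMediaStreamTrack() returns false when the track's source cannot be mapped to a native source; remote sources are still NOTIMPLEMENTED() in this patch. The full diff follows.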
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "content/renderer/media/media_stream_dependency_factory.h"
 
 #include <vector>
 
 #include "base/synchronization/waitable_event.h"
 #include "base/utf_string_conversions.h"
(...skipping 47 matching lines...)
       webrtc::MediaConstraintsInterface::Constraint constraint;
       constraint.key = kWebAudioConstraints[i].key;
       constraint.value = kWebAudioConstraints[i].value;
 
       DVLOG(1) << "WebAudioConstraints: " << constraint.key
                << " : " << constraint.value;
       mandatory_.push_back(constraint);
     }
   }
 
-  virtual ~WebAudioConstraints() {};
+  virtual ~WebAudioConstraints() {}
 };
 
 class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
  public:
   P2PPortAllocatorFactory(
       P2PSocketDispatcher* socket_dispatcher,
       talk_base::NetworkManager* network_manager,
       talk_base::PacketSocketFactory* socket_factory,
       WebKit::WebFrame* web_frame)
       : socket_dispatcher_(socket_dispatcher),
(...skipping 218 matching lines...)
     source_observer->AddSource(source_data->local_audio_source());
   }
 
   source_observer->StartObservering();
 }
 
 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
     WebKit::WebMediaStream* description) {
   DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()";
   if (!EnsurePeerConnectionFactory()) {
     DVLOG(1) << "EnsurePeerConnectionFactory() failed!";
     return;
   }
 
   std::string label = UTF16ToUTF8(description->label());
   scoped_refptr<webrtc::MediaStreamInterface> native_stream =
       CreateLocalMediaStream(label);
+  MediaStreamExtraData* extra_data = new MediaStreamExtraData(native_stream,
+                                                              true);
+  description->setExtraData(extra_data);
 
   // Add audio tracks.
   WebKit::WebVector<WebKit::WebMediaStreamTrack> audio_tracks;
-  description->audioSources(audio_tracks);
-
-  bool start_stream = false;
+  description->audioTracks(audio_tracks);
   for (size_t i = 0; i < audio_tracks.size(); ++i) {
-    WebKit::WebMediaStreamSource source = audio_tracks[i].source();
-
-    // See if we're adding a WebAudio MediaStream.
-    if (source.requiresAudioConsumer()) {
-      // TODO(crogers, xians): In reality we should be able to send a unique
-      // audio stream to each PeerConnection separately. But currently WebRTC
-      // is only able to handle a global audio stream sent to ALL peers.
-
-      // Create a special source where default WebAudio constraints are used.
-      if (!CreateWebAudioSource(&source)) {
-        LOG(ERROR) << "Failed to create WebAudio source";
-        continue;
-      }
-    }
-
-    MediaStreamSourceExtraData* source_data =
-        static_cast<MediaStreamSourceExtraData*>(source.extraData());
-
-    if (!source_data) {
-      // TODO(perkj): Implement support for sources from
-      // remote MediaStreams.
-      NOTIMPLEMENTED();
-      continue;
-    }
-
-    scoped_refptr<webrtc::AudioTrackInterface> audio_track(
-        CreateLocalAudioTrack(UTF16ToUTF8(audio_tracks[i].id()),
-                              source_data->local_audio_source()));
-    native_stream->AddTrack(audio_track);
-    audio_track->set_enabled(audio_tracks[i].isEnabled());
-    start_stream = true;
-  }
-
-  if (start_stream && GetWebRtcAudioDevice()) {
-    WebRtcAudioCapturer* capturer = GetWebRtcAudioDevice()->capturer();
-    capturer->Start();
+    AddNativeMediaStreamTrack(*description, audio_tracks[i]);
   }
 
   // Add video tracks.
   WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks;
-  description->videoSources(video_tracks);
+  description->videoTracks(video_tracks);
   for (size_t i = 0; i < video_tracks.size(); ++i) {
-    const WebKit::WebMediaStreamSource& source = video_tracks[i].source();
-    MediaStreamSourceExtraData* source_data =
-        static_cast<MediaStreamSourceExtraData*>(source.extraData());
-    if (!source_data || !source_data->video_source()) {
-      // TODO(perkj): Implement support for sources from remote MediaStreams.
-      NOTIMPLEMENTED();
-      continue;
-    }
-
-    scoped_refptr<webrtc::VideoTrackInterface> video_track(
-        CreateLocalVideoTrack(UTF16ToUTF8(video_tracks[i].id()),
-                              source_data->video_source()));
-
-    native_stream->AddTrack(video_track);
-    video_track->set_enabled(video_tracks[i].isEnabled());
+    AddNativeMediaStreamTrack(*description, video_tracks[i]);
   }
-
-  MediaStreamExtraData* extra_data = new MediaStreamExtraData(native_stream,
-                                                              true);
-  description->setExtraData(extra_data);
 }
 
 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
     WebKit::WebMediaStream* description,
     const MediaStreamExtraData::StreamStopCallback& stream_stop) {
   CreateNativeLocalMediaStream(description);
 
   MediaStreamExtraData* extra_data =
       static_cast<MediaStreamExtraData*>(description->extraData());
   extra_data->SetLocalStreamStopCallback(stream_stop);
 }
 
+bool MediaStreamDependencyFactory::AddNativeMediaStreamTrack(
+    const WebKit::WebMediaStream& stream,
+    const WebKit::WebMediaStreamTrack& track) {
+  MediaStreamExtraData* extra_data =
+      static_cast<MediaStreamExtraData*>(stream.extraData());
+  webrtc::MediaStreamInterface* native_stream = extra_data->stream();
+  DCHECK(native_stream);
+
+  WebKit::WebMediaStreamSource source = track.source();
+  MediaStreamSourceExtraData* source_data =
+      static_cast<MediaStreamSourceExtraData*>(source.extraData());
+
+  if (!source_data) {
+    if (source.requiresAudioConsumer()) {
+      // We're adding a WebAudio MediaStream.
+      // TODO(crogers, xians): In reality we should be able to send a unique
+      // audio stream to each PeerConnection separately.
+      // Currently WebRTC is only able to handle a global audio stream sent to
+      // ALL peers. Create a special source where default WebAudio constraints
+      // are used.
+      if (CreateWebAudioSource(&source)) {
+        source_data =
+            static_cast<MediaStreamSourceExtraData*>(source.extraData());
+      } else {
+        LOG(ERROR) << "Failed to create WebAudio source";
+        return false;
+      }
+    } else {
+      // TODO(perkj): Implement support for sources from
+      // remote MediaStreams.
+      NOTIMPLEMENTED();
+      return false;
+    }
+  }
+
+  WebKit::WebMediaStreamSource::Type type = track.source().type();
+  DCHECK(type == WebKit::WebMediaStreamSource::TypeAudio ||
+         type == WebKit::WebMediaStreamSource::TypeVideo);
+
+  std::string track_id = UTF16ToUTF8(track.id());
+  if (source.type() == WebKit::WebMediaStreamSource::TypeAudio) {
+    // TODO(henrika,xians): Refactor how an audio track is created to harmonize
+    // with video tracks.
+    scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+        CreateLocalAudioTrack(track_id, source_data->local_audio_source()));
+    audio_track->set_enabled(track.isEnabled());
+    if (GetWebRtcAudioDevice()) {
+      WebRtcAudioCapturer* capturer = GetWebRtcAudioDevice()->capturer();
+      if (!capturer->is_recording())
+        capturer->Start();
+    }
+    return native_stream->AddTrack(audio_track);
+  } else {
+    scoped_refptr<webrtc::VideoTrackInterface> video_track(
+        CreateLocalVideoTrack(track_id, source_data->video_source()));
+    video_track->set_enabled(track.isEnabled());
+    return native_stream->AddTrack(video_track);
+  }
+}
+
+bool MediaStreamDependencyFactory::RemoveNativeMediaStreamTrack(
+    const WebKit::WebMediaStream& stream,
+    const WebKit::WebMediaStreamTrack& track) {
+  MediaStreamExtraData* extra_data =
+      static_cast<MediaStreamExtraData*>(stream.extraData());
+  webrtc::MediaStreamInterface* native_stream = extra_data->stream();
+  DCHECK(native_stream);
+
+  WebKit::WebMediaStreamSource::Type type = track.source().type();
+  DCHECK(type == WebKit::WebMediaStreamSource::TypeAudio ||
+         type == WebKit::WebMediaStreamSource::TypeVideo);
+
+  std::string track_id = UTF16ToUTF8(track.id());
+  return type == WebKit::WebMediaStreamSource::TypeAudio ?
+      native_stream->RemoveTrack(native_stream->FindAudioTrack(track_id)) :
+      native_stream->RemoveTrack(native_stream->FindVideoTrack(track_id));
+}
+
 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
   DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()";
   if (!pc_factory_.get()) {
     DCHECK(!audio_device_);
     audio_device_ = new WebRtcAudioDeviceImpl();
     scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
         webrtc::CreatePeerConnectionFactory(worker_thread_,
                                             signaling_thread_,
                                             audio_device_));
     if (factory.get())
(...skipping 258 matching lines...)
       // processed before returning. We wait for the above task to finish before
       // letting the function continue to avoid any potential race issues.
       chrome_worker_thread_.Stop();
     } else {
       NOTREACHED() << "Worker thread not running.";
     }
   }
 }
 
 }  // namespace content