Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(80)

Side by Side Diff: content/renderer/media/media_stream_dependency_factory.cc

Issue 12086092: Implement audio constraints for PeerConnection API (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: remove one spa Created 7 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/media_stream_dependency_factory.h" 5 #include "content/renderer/media/media_stream_dependency_factory.h"
6 6
7 #include <vector> 7 #include <vector>
8 8
9 #include "base/synchronization/waitable_event.h" 9 #include "base/synchronization/waitable_event.h"
10 #include "base/utf_string_conversions.h" 10 #include "base/utf_string_conversions.h"
(...skipping 200 matching lines...) Expand 10 before | Expand all | Expand 10 after
211 MediaStreamSourceExtraData* source_data = 211 MediaStreamSourceExtraData* source_data =
212 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 212 static_cast<MediaStreamSourceExtraData*>(source.extraData());
213 if (!source_data) { 213 if (!source_data) {
214 // TODO(perkj): Implement support for sources from remote MediaStreams. 214 // TODO(perkj): Implement support for sources from remote MediaStreams.
215 NOTIMPLEMENTED(); 215 NOTIMPLEMENTED();
216 continue; 216 continue;
217 } 217 }
218 const bool is_screencast = (source_data->device_info().device.type == 218 const bool is_screencast = (source_data->device_info().device.type ==
219 content::MEDIA_TAB_VIDEO_CAPTURE); 219 content::MEDIA_TAB_VIDEO_CAPTURE);
220 source_data->SetVideoSource( 220 source_data->SetVideoSource(
221 CreateVideoSource(source_data->device_info().session_id, 221 CreateLocalVideoSource(source_data->device_info().session_id,
222 is_screencast, 222 is_screencast,
223 &native_video_constraints)); 223 &native_video_constraints));
224 source_observer->AddSource(source_data->video_source()); 224 source_observer->AddSource(source_data->video_source());
225 } 225 }
226 226
227 // Do additional source initialization if the audio source is a valid 227 // Do additional source initialization if the audio source is a valid
228 // microphone or tab audio. 228 // microphone or tab audio.
229 RTCMediaConstraints native_audio_constraints(audio_constraints);
229 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components; 230 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components;
230 description->audioSources(audio_components); 231 description->audioSources(audio_components);
231 for (size_t i = 0; i < audio_components.size(); ++i) { 232 for (size_t i = 0; i < audio_components.size(); ++i) {
232 const WebKit::WebMediaStreamSource& source = audio_components[i].source(); 233 const WebKit::WebMediaStreamSource& source = audio_components[i].source();
233 MediaStreamSourceExtraData* source_data = 234 MediaStreamSourceExtraData* source_data =
234 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 235 static_cast<MediaStreamSourceExtraData*>(source.extraData());
235 if (!source_data) { 236 if (!source_data) {
236 // TODO(henrika): Implement support for sources from remote MediaStreams. 237 // TODO(henrika): Implement support for sources from remote MediaStreams.
237 NOTIMPLEMENTED(); 238 NOTIMPLEMENTED();
238 continue; 239 continue;
239 } 240 }
240 241
241 const StreamDeviceInfo device_info = source_data->device_info(); 242 const StreamDeviceInfo device_info = source_data->device_info();
242 if (IsAudioMediaType(device_info.device.type)) { 243 if (IsAudioMediaType(device_info.device.type)) {
243 if (!InitializeAudioSource(device_info)) { 244 if (!InitializeAudioSource(device_info)) {
244 DLOG(WARNING) << "Unsupported audio source"; 245 DLOG(WARNING) << "Unsupported audio source";
245 sources_created.Run(description, false); 246 sources_created.Run(description, false);
246 return; 247 return;
247 } 248 }
248 } 249 }
250
251 // Creates a LocalAudioSource object which holds audio options.
252 source_data->SetLocalAudioSource(
253 CreateLocalAudioSource(&native_audio_constraints));
254 source_observer->AddSource(source_data->local_audio_source());
249 } 255 }
250 256
251 source_observer->StartObservering(); 257 source_observer->StartObservering();
252 } 258 }
253 259
254 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( 260 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
255 WebKit::WebMediaStreamDescriptor* description) { 261 WebKit::WebMediaStreamDescriptor* description) {
256 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; 262 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()";
257 if (!EnsurePeerConnectionFactory()) { 263 if (!EnsurePeerConnectionFactory()) {
258 DVLOG(1) << "EnsurePeerConnectionFactory() failed!"; 264 DVLOG(1) << "EnsurePeerConnectionFactory() failed!";
(...skipping 10 matching lines...) Expand all
269 275
270 for (size_t i = 0; i < audio_components.size(); ++i) { 276 for (size_t i = 0; i < audio_components.size(); ++i) {
271 WebKit::WebMediaStreamSource source = audio_components[i].source(); 277 WebKit::WebMediaStreamSource source = audio_components[i].source();
272 278
273 // See if we're adding a WebAudio MediaStream. 279 // See if we're adding a WebAudio MediaStream.
274 if (source.requiresAudioConsumer()) { 280 if (source.requiresAudioConsumer()) {
275 // TODO(crogers, xians): In reality we should be able to send a unique 281 // TODO(crogers, xians): In reality we should be able to send a unique
276 // audio stream to each PeerConnection separately. But currently WebRTC 282 // audio stream to each PeerConnection separately. But currently WebRTC
277 // is only able to handle a global audio stream sent to ALL peers. 283 // is only able to handle a global audio stream sent to ALL peers.
278 284
279 // TODO(henrika): refactor and utilize audio constraints. 285 // TODO(henrika): Refactor and utilize audio constraints. Audio
286 // constraints are passed via LocalAudioSource.
280 if (CreateWebAudioSource(&source)) { 287 if (CreateWebAudioSource(&source)) {
281 scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track( 288 scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track(
282 CreateLocalAudioTrack(UTF16ToUTF8(audio_components[i].id()), NULL)); 289 CreateLocalAudioTrack(UTF16ToUTF8(audio_components[i].id()),
290 NULL));
283 native_stream->AddTrack(audio_track); 291 native_stream->AddTrack(audio_track);
284 audio_track->set_enabled(audio_components[i].isEnabled()); 292 audio_track->set_enabled(audio_components[i].isEnabled());
285 } else { 293 } else {
286 DLOG(WARNING) << "Failed to create WebAudio source"; 294 DLOG(WARNING) << "Failed to create WebAudio source";
287 } 295 }
288 } else { 296 } else {
289 MediaStreamSourceExtraData* source_data = 297 MediaStreamSourceExtraData* source_data =
290 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 298 static_cast<MediaStreamSourceExtraData*>(source.extraData());
291 299
292 if (!source_data) { 300 if (!source_data) {
293 // TODO(perkj): Implement support for sources from 301 // TODO(perkj): Implement support for sources from
294 // remote MediaStreams. 302 // remote MediaStreams.
295 NOTIMPLEMENTED(); 303 NOTIMPLEMENTED();
296 continue; 304 continue;
297 } 305 }
298 306
299 // TODO(perkj): Refactor the creation of audio tracks to use a proper
300 // interface for receiving audio input data. Currently NULL is passed
301 // since the |audio_device| is the wrong class and is unused.
302 scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track( 307 scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track(
303 CreateLocalAudioTrack(UTF16ToUTF8(source.id()), NULL)); 308 CreateLocalAudioTrack(UTF16ToUTF8(source.id()),
309 source_data->local_audio_source()));
304 native_stream->AddTrack(audio_track); 310 native_stream->AddTrack(audio_track);
305 audio_track->set_enabled(audio_components[i].isEnabled()); 311 audio_track->set_enabled(audio_components[i].isEnabled());
306 } 312 }
307 } 313 }
308 314
309 // Add video tracks. 315 // Add video tracks.
310 WebKit::WebVector<WebKit::WebMediaStreamComponent> video_components; 316 WebKit::WebVector<WebKit::WebMediaStreamComponent> video_components;
311 description->videoSources(video_components); 317 description->videoSources(video_components);
312 for (size_t i = 0; i < video_components.size(); ++i) { 318 for (size_t i = 0; i < video_components.size(); ++i) {
313 const WebKit::WebMediaStreamSource& source = video_components[i].source(); 319 const WebKit::WebMediaStreamSource& source = video_components[i].source();
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
379 return pc_factory_->CreatePeerConnection( 385 return pc_factory_->CreatePeerConnection(
380 ice_servers, constraints, pa_factory, observer).get(); 386 ice_servers, constraints, pa_factory, observer).get();
381 } 387 }
382 388
383 scoped_refptr<webrtc::LocalMediaStreamInterface> 389 scoped_refptr<webrtc::LocalMediaStreamInterface>
384 MediaStreamDependencyFactory::CreateLocalMediaStream( 390 MediaStreamDependencyFactory::CreateLocalMediaStream(
385 const std::string& label) { 391 const std::string& label) {
386 return pc_factory_->CreateLocalMediaStream(label).get(); 392 return pc_factory_->CreateLocalMediaStream(label).get();
387 } 393 }
388 394
395 scoped_refptr<webrtc::AudioSourceInterface>
396 MediaStreamDependencyFactory::CreateLocalAudioSource(
397 const webrtc::MediaConstraintsInterface* constraints) {
398 scoped_refptr<webrtc::AudioSourceInterface> source =
399 pc_factory_->CreateAudioSource(constraints).get();
400 return source;
401 }
402
389 scoped_refptr<webrtc::VideoSourceInterface> 403 scoped_refptr<webrtc::VideoSourceInterface>
390 MediaStreamDependencyFactory::CreateVideoSource( 404 MediaStreamDependencyFactory::CreateLocalVideoSource(
391 int video_session_id, 405 int video_session_id,
392 bool is_screencast, 406 bool is_screencast,
393 const webrtc::MediaConstraintsInterface* constraints) { 407 const webrtc::MediaConstraintsInterface* constraints) {
394 RtcVideoCapturer* capturer = new RtcVideoCapturer( 408 RtcVideoCapturer* capturer = new RtcVideoCapturer(
395 video_session_id, vc_manager_.get(), is_screencast); 409 video_session_id, vc_manager_.get(), is_screencast);
396 410
397 // The video source takes ownership of |capturer|. 411 // The video source takes ownership of |capturer|.
398 scoped_refptr<webrtc::VideoSourceInterface> source = 412 scoped_refptr<webrtc::VideoSourceInterface> source =
399 pc_factory_->CreateVideoSource(capturer, constraints).get(); 413 pc_factory_->CreateVideoSource(capturer, constraints).get();
400 return source; 414 return source;
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after
452 scoped_refptr<webrtc::VideoTrackInterface> 466 scoped_refptr<webrtc::VideoTrackInterface>
453 MediaStreamDependencyFactory::CreateLocalVideoTrack( 467 MediaStreamDependencyFactory::CreateLocalVideoTrack(
454 const std::string& label, 468 const std::string& label,
455 webrtc::VideoSourceInterface* source) { 469 webrtc::VideoSourceInterface* source) {
456 return pc_factory_->CreateVideoTrack(label, source).get(); 470 return pc_factory_->CreateVideoTrack(label, source).get();
457 } 471 }
458 472
459 scoped_refptr<webrtc::LocalAudioTrackInterface> 473 scoped_refptr<webrtc::LocalAudioTrackInterface>
460 MediaStreamDependencyFactory::CreateLocalAudioTrack( 474 MediaStreamDependencyFactory::CreateLocalAudioTrack(
461 const std::string& label, 475 const std::string& label,
462 webrtc::AudioDeviceModule* audio_device) { 476 webrtc::AudioSourceInterface* source) {
463 return pc_factory_->CreateLocalAudioTrack(label, audio_device).get(); 477 return pc_factory_->CreateAudioTrack(label, source).get();
464 } 478 }
465 479
466 webrtc::SessionDescriptionInterface* 480 webrtc::SessionDescriptionInterface*
467 MediaStreamDependencyFactory::CreateSessionDescription(const std::string& type, 481 MediaStreamDependencyFactory::CreateSessionDescription(const std::string& type,
468 const std::string& sdp) { 482 const std::string& sdp) {
469 return webrtc::CreateSessionDescription(type, sdp); 483 return webrtc::CreateSessionDescription(type, sdp);
470 } 484 }
471 485
472 webrtc::IceCandidateInterface* MediaStreamDependencyFactory::CreateIceCandidate( 486 webrtc::IceCandidateInterface* MediaStreamDependencyFactory::CreateIceCandidate(
473 const std::string& sdp_mid, 487 const std::string& sdp_mid,
(...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after
572 // processed before returning. We wait for the above task to finish before 586 // processed before returning. We wait for the above task to finish before
573 // letting the function continue to avoid any potential race issues. 587 // letting the function continue to avoid any potential race issues.
574 chrome_worker_thread_.Stop(); 588 chrome_worker_thread_.Stop();
575 } else { 589 } else {
576 NOTREACHED() << "Worker thread not running."; 590 NOTREACHED() << "Worker thread not running.";
577 } 591 }
578 } 592 }
579 } 593 }
580 594
581 } // namespace content 595 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698