Chromium Code Reviews

Side by Side Diff: content/renderer/media/media_stream_dependency_factory.cc

Issue 131763002: Adds MediaStreamSource, MediaStreamAudioSource and MediaStreamVideoCaptureDeviceSource (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Rebased Created 6 years, 10 months ago
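
The new source classes referenced throughout this diff (MediaStreamAudioSource, MediaStreamVideoSource) replace the old MediaStreamSourceExtraData as the extra data attached to a blink::WebMediaStreamSource. Their actual declarations live in the headers this CL adds (media_stream_audio_source.h, media_stream_video_source.h) and are not shown in this file; the outline below is only a sketch inferred from the call sites visible in the diff, not the reviewed code.

    // Sketch only -- shapes inferred from call sites in this diff
    // (device_info(), SetAudioCapturer(), GetAudioCapturer(),
    // SetLocalAudioSource(), local_audio_source(), GetAdapter()).
    #include "base/memory/ref_counted.h"

    namespace webrtc {
    class AudioSourceInterface;
    class VideoSourceInterface;
    }  // namespace webrtc

    namespace content {

    class WebRtcAudioCapturer;        // real class lives elsewhere in content/renderer/media
    struct StreamDeviceInfo;          // defined in content/common, not shown here

    // Extra data for audio sources: holds the audio capturer and the
    // libjingle audio source created for a microphone or tab-audio device.
    class MediaStreamAudioSource {
     public:
      const StreamDeviceInfo& device_info() const;
      void SetAudioCapturer(const scoped_refptr<WebRtcAudioCapturer>& capturer);
      const scoped_refptr<WebRtcAudioCapturer>& GetAudioCapturer() const;
      void SetLocalAudioSource(webrtc::AudioSourceInterface* source);
      webrtc::AudioSourceInterface* local_audio_source() const;
    };

    // Extra data for video sources: exposes the webrtc adapter that
    // CreateNativeVideoMediaStreamTrack() turns into a native video track.
    class MediaStreamVideoSource {
     public:
      webrtc::VideoSourceInterface* GetAdapter() const;
    };

    }  // namespace content
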
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/media_stream_dependency_factory.h" 5 #include "content/renderer/media/media_stream_dependency_factory.h"
6 6
7 #include <vector> 7 #include <vector>
8 8
9 #include "base/command_line.h" 9 #include "base/command_line.h"
10 #include "base/strings/utf_string_conversions.h" 10 #include "base/strings/utf_string_conversions.h"
11 #include "base/synchronization/waitable_event.h" 11 #include "base/synchronization/waitable_event.h"
12 #include "content/common/media/media_stream_messages.h" 12 #include "content/common/media/media_stream_messages.h"
13 #include "content/public/common/content_switches.h" 13 #include "content/public/common/content_switches.h"
14 #include "content/renderer/media/media_stream_audio_processor_options.h" 14 #include "content/renderer/media/media_stream_audio_processor_options.h"
15 #include "content/renderer/media/media_stream_source_extra_data.h" 15 #include "content/renderer/media/media_stream_audio_source.h"
16 #include "content/renderer/media/media_stream_track_extra_data.h" 16 #include "content/renderer/media/media_stream_track_extra_data.h"
17 #include "content/renderer/media/media_stream_video_source.h"
17 #include "content/renderer/media/media_stream_video_track.h" 18 #include "content/renderer/media/media_stream_video_track.h"
18 #include "content/renderer/media/peer_connection_identity_service.h" 19 #include "content/renderer/media/peer_connection_identity_service.h"
19 #include "content/renderer/media/rtc_media_constraints.h" 20 #include "content/renderer/media/rtc_media_constraints.h"
20 #include "content/renderer/media/rtc_peer_connection_handler.h" 21 #include "content/renderer/media/rtc_peer_connection_handler.h"
21 #include "content/renderer/media/rtc_video_capturer.h" 22 #include "content/renderer/media/rtc_video_capturer.h"
22 #include "content/renderer/media/rtc_video_decoder_factory.h" 23 #include "content/renderer/media/rtc_video_decoder_factory.h"
23 #include "content/renderer/media/rtc_video_encoder_factory.h" 24 #include "content/renderer/media/rtc_video_encoder_factory.h"
24 #include "content/renderer/media/webaudio_capturer_source.h" 25 #include "content/renderer/media/webaudio_capturer_source.h"
25 #include "content/renderer/media/webrtc_audio_device_impl.h" 26 #include "content/renderer/media/webrtc_audio_device_impl.h"
26 #include "content/renderer/media/webrtc_local_audio_track.h" 27 #include "content/renderer/media/webrtc_local_audio_track.h"
(...skipping 85 matching lines...)
112 private: 113 private:
113 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_; 114 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
114 // |network_manager_| and |socket_factory_| are weak references, owned by 115 // |network_manager_| and |socket_factory_| are weak references, owned by
115 // MediaStreamDependencyFactory. 116 // MediaStreamDependencyFactory.
116 talk_base::NetworkManager* network_manager_; 117 talk_base::NetworkManager* network_manager_;
117 talk_base::PacketSocketFactory* socket_factory_; 118 talk_base::PacketSocketFactory* socket_factory_;
118 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory. 119 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory.
119 blink::WebFrame* web_frame_; 120 blink::WebFrame* web_frame_;
120 }; 121 };
121 122
122 // SourceStateObserver is a helper class used for observing the startup state
123 // transition of webrtc media sources such as a camera or microphone.
124 // An instance of the object deletes itself after use.
125 // Usage:
126 // 1. Create an instance of the object with the blink::WebMediaStream
127 // the observed sources belong to, and a callback.
128 // 2. Add the sources to the observer using AddSource.
129 // 3. Call StartObserving()
130 // 4. The callback will be triggered when all sources have transitioned from
131 // webrtc::MediaSourceInterface::kInitializing.
132 class SourceStateObserver : public webrtc::ObserverInterface,
133 public base::NonThreadSafe {
134 public:
135 SourceStateObserver(
136 blink::WebMediaStream* web_stream,
137 const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback)
138 : web_stream_(web_stream),
139 ready_callback_(callback),
140 live_(true) {
141 }
142
143 void AddSource(webrtc::MediaSourceInterface* source) {
144 DCHECK(CalledOnValidThread());
145 switch (source->state()) {
146 case webrtc::MediaSourceInterface::kInitializing:
147 sources_.push_back(source);
148 source->RegisterObserver(this);
149 break;
150 case webrtc::MediaSourceInterface::kLive:
151 // The source is already live so we don't need to wait for it.
152 break;
153 case webrtc::MediaSourceInterface::kEnded:
154 // The source has already failed.
155 live_ = false;
156 break;
157 default:
158 NOTREACHED();
159 }
160 }
161
162 void StartObservering() {
163 DCHECK(CalledOnValidThread());
164 CheckIfSourcesAreLive();
165 }
166
167 virtual void OnChanged() OVERRIDE {
168 DCHECK(CalledOnValidThread());
169 CheckIfSourcesAreLive();
170 }
171
172 private:
173 void CheckIfSourcesAreLive() {
174 ObservedSources::iterator it = sources_.begin();
175 while (it != sources_.end()) {
176 if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) {
177 live_ &= (*it)->state() == webrtc::MediaSourceInterface::kLive;
178 (*it)->UnregisterObserver(this);
179 it = sources_.erase(it);
180 } else {
181 ++it;
182 }
183 }
184 if (sources_.empty()) {
185 ready_callback_.Run(web_stream_, live_);
186 delete this;
187 }
188 }
189
190 blink::WebMediaStream* web_stream_;
191 MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_;
192 bool live_;
193 typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> >
194 ObservedSources;
195 ObservedSources sources_;
196 };
197
198 MediaStreamDependencyFactory::MediaStreamDependencyFactory( 123 MediaStreamDependencyFactory::MediaStreamDependencyFactory(
199 P2PSocketDispatcher* p2p_socket_dispatcher) 124 P2PSocketDispatcher* p2p_socket_dispatcher)
200 : network_manager_(NULL), 125 : network_manager_(NULL),
201 p2p_socket_dispatcher_(p2p_socket_dispatcher), 126 p2p_socket_dispatcher_(p2p_socket_dispatcher),
202 signaling_thread_(NULL), 127 signaling_thread_(NULL),
203 worker_thread_(NULL), 128 worker_thread_(NULL),
204 chrome_worker_thread_("Chrome_libJingle_WorkerThread"), 129 chrome_worker_thread_("Chrome_libJingle_WorkerThread"),
205 aec_dump_file_(base::kInvalidPlatformFileValue) { 130 aec_dump_file_(base::kInvalidPlatformFileValue) {
206 } 131 }
207 132
208 MediaStreamDependencyFactory::~MediaStreamDependencyFactory() { 133 MediaStreamDependencyFactory::~MediaStreamDependencyFactory() {
209 CleanupPeerConnectionFactory(); 134 CleanupPeerConnectionFactory();
210 if (aec_dump_file_ != base::kInvalidPlatformFileValue) 135 if (aec_dump_file_ != base::kInvalidPlatformFileValue)
211 base::ClosePlatformFile(aec_dump_file_); 136 base::ClosePlatformFile(aec_dump_file_);
212 } 137 }
213 138
214 blink::WebRTCPeerConnectionHandler* 139 blink::WebRTCPeerConnectionHandler*
215 MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler( 140 MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler(
216 blink::WebRTCPeerConnectionHandlerClient* client) { 141 blink::WebRTCPeerConnectionHandlerClient* client) {
217 // Save histogram data so we can see how much PeerConnection is used. 142 // Save histogram data so we can see how much PeerConnection is used.
218 // The histogram counts the number of calls to the JS API 143 // The histogram counts the number of calls to the JS API
219 // webKitRTCPeerConnection. 144 // webKitRTCPeerConnection.
220 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); 145 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);
221 146
222 if (!EnsurePeerConnectionFactory())
223 return NULL;
224
225 return new RTCPeerConnectionHandler(client, this); 147 return new RTCPeerConnectionHandler(client, this);
226 } 148 }
227 149
228 void MediaStreamDependencyFactory::CreateNativeMediaSources( 150 bool MediaStreamDependencyFactory::InitializeMediaStreamAudioSource(
229 int render_view_id, 151 int render_view_id,
230 const blink::WebMediaConstraints& audio_constraints, 152 const blink::WebMediaConstraints& audio_constraints,
231 const blink::WebMediaConstraints& video_constraints, 153 MediaStreamAudioSource* source_data) {
232 blink::WebMediaStream* web_stream, 154 DVLOG(1) << "InitializeMediaStreamAudioSources()";
233 const MediaSourcesCreatedCallback& sources_created) {
234 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()";
235 if (!EnsurePeerConnectionFactory()) {
236 sources_created.Run(web_stream, false);
237 return;
238 }
239
240 // |source_observer| cleans itself up when it has completed
241 // source_observer->StartObservering().
242 SourceStateObserver* source_observer =
243 new SourceStateObserver(web_stream, sources_created);
244
245 // Create local video sources.
246 RTCMediaConstraints native_video_constraints(video_constraints);
247 blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
248 web_stream->videoTracks(video_tracks);
249 for (size_t i = 0; i < video_tracks.size(); ++i) {
250 const blink::WebMediaStreamSource& source = video_tracks[i].source();
251 MediaStreamSourceExtraData* source_data =
252 static_cast<MediaStreamSourceExtraData*>(source.extraData());
253
254 // Check if the source has already been created. This happens when the same
255 // source is used in multiple MediaStreams as a result of calling
256 // getUserMedia.
257 if (source_data->video_source())
258 continue;
259
260 const bool is_screencast =
261 source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE ||
262 source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
263 source_data->SetVideoSource(
264 CreateLocalVideoSource(source_data->device_info().session_id,
265 is_screencast,
266 &native_video_constraints).get());
267 source_observer->AddSource(source_data->video_source());
268 }
269 155
270 // Do additional source initialization if the audio source is a valid 156 // Do additional source initialization if the audio source is a valid
271 // microphone or tab audio. 157 // microphone or tab audio.
272 RTCMediaConstraints native_audio_constraints(audio_constraints); 158 RTCMediaConstraints native_audio_constraints(audio_constraints);
273 ApplyFixedAudioConstraints(&native_audio_constraints); 159 ApplyFixedAudioConstraints(&native_audio_constraints);
274 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
275 web_stream->audioTracks(audio_tracks);
276 for (size_t i = 0; i < audio_tracks.size(); ++i) {
277 const blink::WebMediaStreamSource& source = audio_tracks[i].source();
278 MediaStreamSourceExtraData* source_data =
279 static_cast<MediaStreamSourceExtraData*>(source.extraData());
280 160
281 // Check if the source has already been created. This happens when the same 161 StreamDeviceInfo device_info = source_data->device_info();
282 // source is used in multiple MediaStreams as a result of calling 162 RTCMediaConstraints constraints = native_audio_constraints;
283 // getUserMedia.
284 if (source_data->local_audio_source())
285 continue;
286 163
287 // TODO(xians): Create a new capturer for different microphones when we 164 // If any platform effects are available, check them against the
288 // support multiple microphones. See issue crbug/262117. 165 // constraints. Disable effects to match false constraints, but if a
289 StreamDeviceInfo device_info = source_data->device_info(); 166 // constraint is true, set the constraint to false to later disable the
290 RTCMediaConstraints constraints = native_audio_constraints; 167 // software effect.
291 168 int effects = device_info.device.input.effects;
292 // If any platform effects are available, check them against the 169 if (effects != media::AudioParameters::NO_EFFECTS) {
293 // constraints. Disable effects to match false constraints, but if a 170 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) {
294 // constraint is true, set the constraint to false to later disable the 171 bool value;
295 // software effect. 172 if (!webrtc::FindConstraint(&constraints,
296 int effects = device_info.device.input.effects; 173 kConstraintEffectMap[i].constraint, &value,
297 if (effects != media::AudioParameters::NO_EFFECTS) { 174 NULL) || !value) {
298 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) { 175 // If the constraint is false, or does not exist, disable the platform
299 bool value; 176 // effect.
300 if (!webrtc::FindConstraint(&constraints, 177 effects &= ~kConstraintEffectMap[i].effect;
301 kConstraintEffectMap[i].constraint, &value, NULL) || !value) { 178 DVLOG(1) << "Disabling constraint: "
302 // If the constraint is false, or does not exist, disable the platform 179 << kConstraintEffectMap[i].constraint;
303 // effect. 180 } else if (effects & kConstraintEffectMap[i].effect) {
304 effects &= ~kConstraintEffectMap[i].effect; 181 // If the constraint is true, leave the platform effect enabled, and
305 DVLOG(1) << "Disabling constraint: " 182 // set the constraint to false to later disable the software effect.
306 << kConstraintEffectMap[i].constraint; 183 constraints.AddMandatory(kConstraintEffectMap[i].constraint,
307 } else if (effects & kConstraintEffectMap[i].effect) { 184 webrtc::MediaConstraintsInterface::kValueFalse,
308 // If the constraint is true, leave the platform effect enabled, and 185 true);
309 // set the constraint to false to later disable the software effect. 186 DVLOG(1) << "Disabling platform effect: "
310 constraints.AddMandatory(kConstraintEffectMap[i].constraint, 187 << kConstraintEffectMap[i].constraint;
311 webrtc::MediaConstraintsInterface::kValueFalse, true);
312 DVLOG(1) << "Disabling platform effect: "
313 << kConstraintEffectMap[i].constraint;
314 }
315 } 188 }
316 device_info.device.input.effects = effects;
317 } 189 }
318 190 device_info.device.input.effects = effects;
319 scoped_refptr<WebRtcAudioCapturer> capturer(
320 CreateAudioCapturer(render_view_id, device_info, audio_constraints));
321 if (!capturer.get()) {
322 DLOG(WARNING) << "Failed to create the capturer for device "
323 << device_info.device.id;
324 sources_created.Run(web_stream, false);
325 // TODO(xians): Don't we need to check if source_observer is observing
326 // something? If not, then it looks like we have a leak here.
327 // OTOH, if it _is_ observing something, then the callback might
328 // be called multiple times which is likely also a bug.
329 return;
330 }
331 source_data->SetAudioCapturer(capturer);
332
333 // Creates a LocalAudioSource object which holds audio options.
334 // TODO(xians): The option should apply to the track instead of the source.
335 source_data->SetLocalAudioSource(
336 CreateLocalAudioSource(&constraints).get());
337 source_observer->AddSource(source_data->local_audio_source());
338 } 191 }
339 192
340 source_observer->StartObservering(); 193 scoped_refptr<WebRtcAudioCapturer> capturer(
194 CreateAudioCapturer(render_view_id, device_info, audio_constraints));
195 if (!capturer.get()) {
196 DLOG(WARNING) << "Failed to create the capturer for device "
197 << device_info.device.id;
198 // TODO(xians): Don't we need to check if source_observer is observing
199 // something? If not, then it looks like we have a leak here.
200 // OTOH, if it _is_ observing something, then the callback might
201 // be called multiple times which is likely also a bug.
202 return false;
203 }
204 source_data->SetAudioCapturer(capturer);
205
206 // Creates a LocalAudioSource object which holds audio options.
207 // TODO(xians): The option should apply to the track instead of the source.
208 // TODO(perkj): Move audio constraints parsing to Chrome.
209 // Currently there are a few constraints that are parsed by libjingle and
210 // the state is set to ended if parsing fails.
211 scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
212 CreateLocalAudioSource(&constraints).get());
213 if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
214 DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
215 return false;
216 }
217 source_data->SetLocalAudioSource(rtc_source);
218 return true;
219 }
220
221 cricket::VideoCapturer* MediaStreamDependencyFactory::CreateVideoCapturer(
222 const StreamDeviceInfo& info) {
223 bool is_screencast =
224 info.device.type == MEDIA_TAB_VIDEO_CAPTURE ||
225 info.device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
226 return new RtcVideoCapturer(info.session_id, is_screencast);
341 } 227 }
342 228
343 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( 229 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
344 blink::WebMediaStream* web_stream) { 230 blink::WebMediaStream* web_stream) {
345 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; 231 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()";
346 if (!EnsurePeerConnectionFactory()) {
347 DVLOG(1) << "EnsurePeerConnectionFactory() failed!";
348 return;
349 }
350 232
351 std::string label = base::UTF16ToUTF8(web_stream->id()); 233 std::string label = base::UTF16ToUTF8(web_stream->id());
352 scoped_refptr<webrtc::MediaStreamInterface> native_stream = 234 scoped_refptr<webrtc::MediaStreamInterface> native_stream =
353 CreateLocalMediaStream(label); 235 CreateLocalMediaStream(label);
354 MediaStreamExtraData* extra_data = 236 MediaStreamExtraData* extra_data =
355 new MediaStreamExtraData(native_stream.get(), true); 237 new MediaStreamExtraData(native_stream.get(), true);
356 web_stream->setExtraData(extra_data); 238 web_stream->setExtraData(extra_data);
357 239
358 // Add audio tracks. 240 // Add audio tracks.
359 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; 241 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
(...skipping 18 matching lines...)
378 MediaStreamExtraData* extra_data = 260 MediaStreamExtraData* extra_data =
379 static_cast<MediaStreamExtraData*>(web_stream->extraData()); 261 static_cast<MediaStreamExtraData*>(web_stream->extraData());
380 extra_data->SetLocalStreamStopCallback(stream_stop); 262 extra_data->SetLocalStreamStopCallback(stream_stop);
381 } 263 }
382 264
383 scoped_refptr<webrtc::AudioTrackInterface> 265 scoped_refptr<webrtc::AudioTrackInterface>
384 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( 266 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack(
385 const blink::WebMediaStreamTrack& track) { 267 const blink::WebMediaStreamTrack& track) {
386 blink::WebMediaStreamSource source = track.source(); 268 blink::WebMediaStreamSource source = track.source();
387 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); 269 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
388 MediaStreamSourceExtraData* source_data = 270 MediaStreamAudioSource* source_data =
389 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 271 static_cast<MediaStreamAudioSource*>(source.extraData());
390 272
391 // In the future the constraints will belong to the track itself, but 273 // In the future the constraints will belong to the track itself, but
392 // right now they're on the source, so we fetch them from there. 274 // right now they're on the source, so we fetch them from there.
393 RTCMediaConstraints track_constraints(source.constraints()); 275 RTCMediaConstraints track_constraints(source.constraints());
394 276
395 // Apply default audio constraints that enable echo cancellation, 277 // Apply default audio constraints that enable echo cancellation,
396 // automatic gain control, noise suppression and high-pass filter. 278 // automatic gain control, noise suppression and high-pass filter.
397 ApplyFixedAudioConstraints(&track_constraints); 279 ApplyFixedAudioConstraints(&track_constraints);
398 280
399 scoped_refptr<WebAudioCapturerSource> webaudio_source; 281 scoped_refptr<WebAudioCapturerSource> webaudio_source;
400 if (!source_data) { 282 if (!source_data) {
401 if (source.requiresAudioConsumer()) { 283 if (source.requiresAudioConsumer()) {
402 // We're adding a WebAudio MediaStream. 284 // We're adding a WebAudio MediaStream.
403 // Create a specific capturer for each WebAudio consumer. 285 // Create a specific capturer for each WebAudio consumer.
404 webaudio_source = CreateWebAudioSource(&source, track_constraints); 286 webaudio_source = CreateWebAudioSource(&source, track_constraints);
405 source_data = 287 source_data =
406 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 288 static_cast<MediaStreamAudioSource*>(source.extraData());
407 } else { 289 } else {
408 // TODO(perkj): Implement support for sources from 290 // TODO(perkj): Implement support for sources from
409 // remote MediaStreams. 291 // remote MediaStreams.
410 NOTIMPLEMENTED(); 292 NOTIMPLEMENTED();
411 return NULL; 293 return NULL;
412 } 294 }
413 } 295 }
414 296
415 scoped_refptr<webrtc::AudioTrackInterface> audio_track( 297 scoped_refptr<webrtc::AudioTrackInterface> audio_track(
416 CreateLocalAudioTrack(track.id().utf8(), 298 CreateLocalAudioTrack(track.id().utf8(),
417 source_data->GetAudioCapturer(), 299 source_data->GetAudioCapturer(),
418 webaudio_source.get(), 300 webaudio_source.get(),
419 source_data->local_audio_source())); 301 source_data->local_audio_source()));
420 AddNativeTrackToBlinkTrack(audio_track.get(), track, true); 302 AddNativeTrackToBlinkTrack(audio_track.get(), track, true);
421 303
422 audio_track->set_enabled(track.isEnabled()); 304 audio_track->set_enabled(track.isEnabled());
423 305
424 // Pass the pointer of the source provider to the blink audio track. 306 // Pass the pointer of the source provider to the blink audio track.
425 blink::WebMediaStreamTrack writable_track = track; 307 blink::WebMediaStreamTrack writable_track = track;
426 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>( 308 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>(
427 audio_track.get())->audio_source_provider()); 309 audio_track.get())->audio_source_provider());
428 310
429 return audio_track; 311 return audio_track;
430 } 312 }
431 313
432 scoped_refptr<webrtc::VideoTrackInterface> 314 scoped_refptr<webrtc::VideoTrackInterface>
433 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( 315 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack(
434 const blink::WebMediaStreamTrack& track) { 316 const blink::WebMediaStreamTrack& track) {
317 DCHECK(track.extraData() == NULL);
435 blink::WebMediaStreamSource source = track.source(); 318 blink::WebMediaStreamSource source = track.source();
436 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo); 319 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo);
437 MediaStreamSourceExtraData* source_data = 320
438 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 321 MediaStreamVideoSource* source_data =
322 static_cast<MediaStreamVideoSource*>(source.extraData());
439 323
440 if (!source_data) { 324 if (!source_data) {
441 // TODO(perkj): Implement support for sources from 325 // TODO(perkj): Implement support for sources from
442 // remote MediaStreams. 326 // remote MediaStreams.
443 NOTIMPLEMENTED(); 327 NOTIMPLEMENTED();
444 return NULL; 328 return NULL;
445 } 329 }
446 330
447 std::string track_id = base::UTF16ToUTF8(track.id()); 331 // Create native track from the source.
448 scoped_refptr<webrtc::VideoTrackInterface> video_track( 332 scoped_refptr<webrtc::VideoTrackInterface> webrtc_track =
449 CreateLocalVideoTrack(track_id, source_data->video_source())); 333 CreateLocalVideoTrack(track.id().utf8(), source_data->GetAdapter());
450 AddNativeTrackToBlinkTrack(video_track.get(), track, true);
451 334
452 video_track->set_enabled(track.isEnabled()); 335 bool local_track = true;
336 AddNativeTrackToBlinkTrack(webrtc_track, track, local_track);
453 337
454 return video_track; 338 webrtc_track->set_enabled(track.isEnabled());
339
340 return webrtc_track;
455 } 341 }
456 342
457 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack( 343 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack(
458 const blink::WebMediaStreamTrack& track) { 344 const blink::WebMediaStreamTrack& track) {
459 DCHECK(!track.isNull() && !track.extraData()); 345 DCHECK(!track.isNull() && !track.extraData());
460 DCHECK(!track.source().isNull()); 346 DCHECK(!track.source().isNull());
461 347
462 switch (track.source().type()) { 348 switch (track.source().type()) {
463 case blink::WebMediaStreamSource::TypeAudio: 349 case blink::WebMediaStreamSource::TypeAudio:
464 CreateNativeAudioMediaStreamTrack(track); 350 CreateNativeAudioMediaStreamTrack(track);
(...skipping 91 matching lines...)
556 native_stream->FindVideoTrack(track_id)); 442 native_stream->FindVideoTrack(track_id));
557 } 443 }
558 return false; 444 return false;
559 } 445 }
560 446
561 scoped_refptr<webrtc::VideoSourceInterface> 447 scoped_refptr<webrtc::VideoSourceInterface>
562 MediaStreamDependencyFactory::CreateVideoSource( 448 MediaStreamDependencyFactory::CreateVideoSource(
563 cricket::VideoCapturer* capturer, 449 cricket::VideoCapturer* capturer,
564 const webrtc::MediaConstraintsInterface* constraints) { 450 const webrtc::MediaConstraintsInterface* constraints) {
565 scoped_refptr<webrtc::VideoSourceInterface> source = 451 scoped_refptr<webrtc::VideoSourceInterface> source =
566 pc_factory_->CreateVideoSource(capturer, constraints).get(); 452 GetPcFactory()->CreateVideoSource(capturer, constraints).get();
567 return source; 453 return source;
568 } 454 }
569 455
570 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { 456 const scoped_refptr<webrtc::PeerConnectionFactoryInterface>&
457 MediaStreamDependencyFactory::GetPcFactory() {
458 if (!pc_factory_)
459 CreatePeerConnectionFactory();
460 CHECK(pc_factory_);
461 return pc_factory_;
462 }
463
464 void MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
571 DCHECK(!pc_factory_.get()); 465 DCHECK(!pc_factory_.get());
572 DCHECK(!audio_device_.get()); 466 DCHECK(!audio_device_.get());
467 DCHECK(!signaling_thread_);
468 DCHECK(!worker_thread_);
469 DCHECK(!network_manager_);
470 DCHECK(!socket_factory_);
471 DCHECK(!chrome_worker_thread_.IsRunning());
472
573 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()"; 473 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()";
574 474
475 jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
476 jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
477 signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
478 CHECK(signaling_thread_);
479
480 chrome_worker_thread_.Start();
481
482 base::WaitableEvent start_worker_event(true, false);
483 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
484 &MediaStreamDependencyFactory::InitializeWorkerThread,
485 base::Unretained(this),
486 &worker_thread_,
487 &start_worker_event));
488 start_worker_event.Wait();
489 CHECK(worker_thread_);
490
491 base::WaitableEvent create_network_manager_event(true, false);
492 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
493 &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
494 base::Unretained(this),
495 &create_network_manager_event));
496 create_network_manager_event.Wait();
497
498 socket_factory_.reset(
499 new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));
500
501 // Init SSL, which will be needed by PeerConnection.
502 #if defined(USE_OPENSSL)
503 if (!talk_base::InitializeSSL()) {
504 LOG(ERROR) << "Failed on InitializeSSL.";
505 NOTREACHED();
506 return;
507 }
508 #else
509 // TODO(ronghuawu): Replace this call with InitializeSSL.
510 net::EnsureNSSSSLInit();
511 #endif
512
575 scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory; 513 scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
576 scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory; 514 scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;
577 515
578 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); 516 const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
579 scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories = 517 scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories =
580 RenderThreadImpl::current()->GetGpuFactories(); 518 RenderThreadImpl::current()->GetGpuFactories();
581 if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) { 519 if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) {
582 if (gpu_factories) 520 if (gpu_factories)
583 decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories)); 521 decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories));
584 } 522 }
(...skipping 12 matching lines...)
597 535
598 scoped_refptr<WebRtcAudioDeviceImpl> audio_device( 536 scoped_refptr<WebRtcAudioDeviceImpl> audio_device(
599 new WebRtcAudioDeviceImpl()); 537 new WebRtcAudioDeviceImpl());
600 538
601 scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory( 539 scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
602 webrtc::CreatePeerConnectionFactory(worker_thread_, 540 webrtc::CreatePeerConnectionFactory(worker_thread_,
603 signaling_thread_, 541 signaling_thread_,
604 audio_device.get(), 542 audio_device.get(),
605 encoder_factory.release(), 543 encoder_factory.release(),
606 decoder_factory.release())); 544 decoder_factory.release()));
607 if (!factory.get()) { 545 CHECK(factory);
608 return false;
609 }
610 546
611 audio_device_ = audio_device; 547 audio_device_ = audio_device;
612 pc_factory_ = factory; 548 pc_factory_ = factory;
613 webrtc::PeerConnectionFactoryInterface::Options factory_options; 549 webrtc::PeerConnectionFactoryInterface::Options factory_options;
614 factory_options.disable_sctp_data_channels = 550 factory_options.disable_sctp_data_channels =
615 cmd_line->HasSwitch(switches::kDisableSCTPDataChannels); 551 cmd_line->HasSwitch(switches::kDisableSCTPDataChannels);
616 factory_options.disable_encryption = 552 factory_options.disable_encryption =
617 cmd_line->HasSwitch(switches::kDisableWebRtcEncryption); 553 cmd_line->HasSwitch(switches::kDisableWebRtcEncryption);
618 pc_factory_->SetOptions(factory_options); 554 pc_factory_->SetOptions(factory_options);
619 555
620 // |aec_dump_file| will be invalid when dump is not enabled. 556 // |aec_dump_file| will be invalid when dump is not enabled.
621 if (aec_dump_file_ != base::kInvalidPlatformFileValue) { 557 if (aec_dump_file_ != base::kInvalidPlatformFileValue) {
622 StartAecDump(aec_dump_file_); 558 StartAecDump(aec_dump_file_);
623 aec_dump_file_ = base::kInvalidPlatformFileValue; 559 aec_dump_file_ = base::kInvalidPlatformFileValue;
624 } 560 }
625
626 return true;
627 } 561 }
628 562
629 bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() { 563 bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() {
630 return pc_factory_.get() != NULL; 564 return pc_factory_.get() != NULL;
631 } 565 }
632 566
633 scoped_refptr<webrtc::PeerConnectionInterface> 567 scoped_refptr<webrtc::PeerConnectionInterface>
634 MediaStreamDependencyFactory::CreatePeerConnection( 568 MediaStreamDependencyFactory::CreatePeerConnection(
635 const webrtc::PeerConnectionInterface::IceServers& ice_servers, 569 const webrtc::PeerConnectionInterface::IceServers& ice_servers,
636 const webrtc::MediaConstraintsInterface* constraints, 570 const webrtc::MediaConstraintsInterface* constraints,
637 blink::WebFrame* web_frame, 571 blink::WebFrame* web_frame,
638 webrtc::PeerConnectionObserver* observer) { 572 webrtc::PeerConnectionObserver* observer) {
639 CHECK(web_frame); 573 CHECK(web_frame);
640 CHECK(observer); 574 CHECK(observer);
575 if (!GetPcFactory())
576 return NULL;
641 577
642 scoped_refptr<P2PPortAllocatorFactory> pa_factory = 578 scoped_refptr<P2PPortAllocatorFactory> pa_factory =
643 new talk_base::RefCountedObject<P2PPortAllocatorFactory>( 579 new talk_base::RefCountedObject<P2PPortAllocatorFactory>(
644 p2p_socket_dispatcher_.get(), 580 p2p_socket_dispatcher_.get(),
645 network_manager_, 581 network_manager_,
646 socket_factory_.get(), 582 socket_factory_.get(),
647 web_frame); 583 web_frame);
648 584
649 PeerConnectionIdentityService* identity_service = 585 PeerConnectionIdentityService* identity_service =
650 new PeerConnectionIdentityService( 586 new PeerConnectionIdentityService(
651 GURL(web_frame->document().url().spec()).GetOrigin()); 587 GURL(web_frame->document().url().spec()).GetOrigin());
652 588
653 return pc_factory_->CreatePeerConnection(ice_servers, 589 return GetPcFactory()->CreatePeerConnection(ice_servers,
654 constraints, 590 constraints,
655 pa_factory.get(), 591 pa_factory.get(),
656 identity_service, 592 identity_service,
657 observer).get(); 593 observer).get();
658 } 594 }
659 595
660 scoped_refptr<webrtc::MediaStreamInterface> 596 scoped_refptr<webrtc::MediaStreamInterface>
661 MediaStreamDependencyFactory::CreateLocalMediaStream( 597 MediaStreamDependencyFactory::CreateLocalMediaStream(
662 const std::string& label) { 598 const std::string& label) {
663 return pc_factory_->CreateLocalMediaStream(label).get(); 599 return GetPcFactory()->CreateLocalMediaStream(label).get();
664 } 600 }
665 601
666 scoped_refptr<webrtc::AudioSourceInterface> 602 scoped_refptr<webrtc::AudioSourceInterface>
667 MediaStreamDependencyFactory::CreateLocalAudioSource( 603 MediaStreamDependencyFactory::CreateLocalAudioSource(
668 const webrtc::MediaConstraintsInterface* constraints) { 604 const webrtc::MediaConstraintsInterface* constraints) {
669 scoped_refptr<webrtc::AudioSourceInterface> source = 605 scoped_refptr<webrtc::AudioSourceInterface> source =
670 pc_factory_->CreateAudioSource(constraints).get(); 606 GetPcFactory()->CreateAudioSource(constraints).get();
671 return source; 607 return source;
672 } 608 }
673 609
674 scoped_refptr<webrtc::VideoSourceInterface>
675 MediaStreamDependencyFactory::CreateLocalVideoSource(
676 int video_session_id,
677 bool is_screencast,
678 const webrtc::MediaConstraintsInterface* constraints) {
679 RtcVideoCapturer* capturer = new RtcVideoCapturer(
680 video_session_id, is_screencast);
681
682 // The video source takes ownership of |capturer|.
683 scoped_refptr<webrtc::VideoSourceInterface> source =
684 CreateVideoSource(capturer, constraints);
685 return source;
686 }
687
688 scoped_refptr<WebAudioCapturerSource> 610 scoped_refptr<WebAudioCapturerSource>
689 MediaStreamDependencyFactory::CreateWebAudioSource( 611 MediaStreamDependencyFactory::CreateWebAudioSource(
690 blink::WebMediaStreamSource* source, 612 blink::WebMediaStreamSource* source,
691 const RTCMediaConstraints& constraints) { 613 const RTCMediaConstraints& constraints) {
692 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()"; 614 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()";
693 DCHECK(GetWebRtcAudioDevice()); 615 DCHECK(GetWebRtcAudioDevice());
694 616
695 scoped_refptr<WebAudioCapturerSource> 617 scoped_refptr<WebAudioCapturerSource>
696 webaudio_capturer_source(new WebAudioCapturerSource()); 618 webaudio_capturer_source(new WebAudioCapturerSource());
697 MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData(); 619 MediaStreamAudioSource* source_data = new MediaStreamAudioSource();
698 620
699 // Create a LocalAudioSource object which holds audio options. 621 // Create a LocalAudioSource object which holds audio options.
700 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. 622 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
701 source_data->SetLocalAudioSource(CreateLocalAudioSource(&constraints).get()); 623 source_data->SetLocalAudioSource(CreateLocalAudioSource(&constraints).get());
702 source->setExtraData(source_data); 624 source->setExtraData(source_data);
703 625
704 // Replace the default source with WebAudio as source instead. 626 // Replace the default source with WebAudio as source instead.
705 source->addAudioConsumer(webaudio_capturer_source.get()); 627 source->addAudioConsumer(webaudio_capturer_source.get());
706 628
707 return webaudio_capturer_source; 629 return webaudio_capturer_source;
708 } 630 }
709 631
710 scoped_refptr<webrtc::VideoTrackInterface> 632 scoped_refptr<webrtc::VideoTrackInterface>
711 MediaStreamDependencyFactory::CreateLocalVideoTrack( 633 MediaStreamDependencyFactory::CreateLocalVideoTrack(
712 const std::string& id, 634 const std::string& id,
713 webrtc::VideoSourceInterface* source) { 635 webrtc::VideoSourceInterface* source) {
714 return pc_factory_->CreateVideoTrack(id, source).get(); 636 return GetPcFactory()->CreateVideoTrack(id, source).get();
715 } 637 }
716 638
717 scoped_refptr<webrtc::VideoTrackInterface> 639 scoped_refptr<webrtc::VideoTrackInterface>
718 MediaStreamDependencyFactory::CreateLocalVideoTrack( 640 MediaStreamDependencyFactory::CreateLocalVideoTrack(
719 const std::string& id, cricket::VideoCapturer* capturer) { 641 const std::string& id, cricket::VideoCapturer* capturer) {
720 if (!capturer) { 642 if (!capturer) {
721 LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer."; 643 LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer.";
722 return NULL; 644 return NULL;
723 } 645 }
724 646
725 // Create video source from the |capturer|. 647 // Create video source from the |capturer|.
726 scoped_refptr<webrtc::VideoSourceInterface> source = 648 scoped_refptr<webrtc::VideoSourceInterface> source =
727 CreateVideoSource(capturer, NULL); 649 CreateVideoSource(capturer, NULL);
728 650
729 // Create native track from the source. 651 // Create native track from the source.
730 return pc_factory_->CreateVideoTrack(id, source.get()).get(); 652 return GetPcFactory()->CreateVideoTrack(id, source.get()).get();
731 } 653 }
732 654
733 scoped_refptr<webrtc::AudioTrackInterface> 655 scoped_refptr<webrtc::AudioTrackInterface>
734 MediaStreamDependencyFactory::CreateLocalAudioTrack( 656 MediaStreamDependencyFactory::CreateLocalAudioTrack(
735 const std::string& id, 657 const std::string& id,
736 const scoped_refptr<WebRtcAudioCapturer>& capturer, 658 const scoped_refptr<WebRtcAudioCapturer>& capturer,
737 WebAudioCapturerSource* webaudio_source, 659 WebAudioCapturerSource* webaudio_source,
738 webrtc::AudioSourceInterface* source) { 660 webrtc::AudioSourceInterface* source) {
739 // TODO(xians): Merge |source| to the capturer(). We can't do this today 661 // TODO(xians): Merge |source| to the capturer(). We can't do this today
740 // because only one capturer() is supported while one |source| is created 662 // because only one capturer() is supported while one |source| is created
(...skipping 45 matching lines...)
786 network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get()); 708 network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get());
787 event->Signal(); 709 event->Signal();
788 } 710 }
789 711
790 void MediaStreamDependencyFactory::DeleteIpcNetworkManager() { 712 void MediaStreamDependencyFactory::DeleteIpcNetworkManager() {
791 DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop()); 713 DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
792 delete network_manager_; 714 delete network_manager_;
793 network_manager_ = NULL; 715 network_manager_ = NULL;
794 } 716 }
795 717
796 bool MediaStreamDependencyFactory::EnsurePeerConnectionFactory() {
797 DCHECK(CalledOnValidThread());
798 if (PeerConnectionFactoryCreated())
799 return true;
800
801 if (!signaling_thread_) {
802 jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
803 jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
804 signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
805 CHECK(signaling_thread_);
806 }
807
808 if (!worker_thread_) {
809 if (!chrome_worker_thread_.IsRunning()) {
810 if (!chrome_worker_thread_.Start()) {
811 LOG(ERROR) << "Could not start worker thread";
812 signaling_thread_ = NULL;
813 return false;
814 }
815 }
816 base::WaitableEvent event(true, false);
817 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
818 &MediaStreamDependencyFactory::InitializeWorkerThread,
819 base::Unretained(this),
820 &worker_thread_,
821 &event));
822 event.Wait();
823 DCHECK(worker_thread_);
824 }
825
826 if (!network_manager_) {
827 base::WaitableEvent event(true, false);
828 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
829 &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
830 base::Unretained(this),
831 &event));
832 event.Wait();
833 }
834
835 if (!socket_factory_) {
836 socket_factory_.reset(
837 new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));
838 }
839
840 // Init SSL, which will be needed by PeerConnection.
841 #if defined(USE_OPENSSL)
842 if (!talk_base::InitializeSSL()) {
843 LOG(ERROR) << "Failed on InitializeSSL.";
844 return false;
845 }
846 #else
847 // TODO(ronghuawu): Replace this call with InitializeSSL.
848 net::EnsureNSSSSLInit();
849 #endif
850
851 if (!CreatePeerConnectionFactory()) {
852 LOG(ERROR) << "Could not create PeerConnection factory";
853 return false;
854 }
855 return true;
856 }
857
858 void MediaStreamDependencyFactory::CleanupPeerConnectionFactory() { 718 void MediaStreamDependencyFactory::CleanupPeerConnectionFactory() {
859 pc_factory_ = NULL; 719 pc_factory_ = NULL;
860 if (network_manager_) { 720 if (network_manager_) {
861 // The network manager needs to free its resources on the thread they were 721 // The network manager needs to free its resources on the thread they were
862 // created on, which is the worker thread. 722 // created on, which is the worker thread.
863 if (chrome_worker_thread_.IsRunning()) { 723 if (chrome_worker_thread_.IsRunning()) {
864 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( 724 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
865 &MediaStreamDependencyFactory::DeleteIpcNetworkManager, 725 &MediaStreamDependencyFactory::DeleteIpcNetworkManager,
866 base::Unretained(this))); 726 base::Unretained(this)));
867 // Stopping the thread will wait until all tasks have been 727 // Stopping the thread will wait until all tasks have been
(...skipping 85 matching lines...)
953 void MediaStreamDependencyFactory::OnDisableAecDump() { 813 void MediaStreamDependencyFactory::OnDisableAecDump() {
954 if (aec_dump_file_ != base::kInvalidPlatformFileValue) 814 if (aec_dump_file_ != base::kInvalidPlatformFileValue)
955 base::ClosePlatformFile(aec_dump_file_); 815 base::ClosePlatformFile(aec_dump_file_);
956 aec_dump_file_ = base::kInvalidPlatformFileValue; 816 aec_dump_file_ = base::kInvalidPlatformFileValue;
957 } 817 }
958 818
959 void MediaStreamDependencyFactory::StartAecDump( 819 void MediaStreamDependencyFactory::StartAecDump(
960 const base::PlatformFile& aec_dump_file) { 820 const base::PlatformFile& aec_dump_file) {
961 // |pc_factory_| always takes ownership of |aec_dump_file|. If StartAecDump() 821 // |pc_factory_| always takes ownership of |aec_dump_file|. If StartAecDump()
962 // fails, |aec_dump_file| will be closed. 822 // fails, |aec_dump_file| will be closed.
963 if (!pc_factory_->StartAecDump(aec_dump_file)) 823 if (!GetPcFactory()->StartAecDump(aec_dump_file))
964 VLOG(1) << "Could not start AEC dump."; 824 VLOG(1) << "Could not start AEC dump.";
965 } 825 }
966 826
967 } // namespace content 827 } // namespace content
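
Taken together, the new-side code replaces the asynchronous CreateNativeMediaSources() / SourceStateObserver path with synchronous, per-source setup: InitializeMediaStreamAudioSource() reports success through its return value, CreateVideoCapturer() hands back a capturer on demand, and the PeerConnection factory is created lazily inside GetPcFactory(). A rough caller-side sketch of the audio flow follows; the function name SetUpLocalAudioSources and its surrounding plumbing are hypothetical and not part of this CL.

    // Hypothetical caller -- illustrates the new synchronous flow only.
    #include "content/renderer/media/media_stream_audio_source.h"
    #include "content/renderer/media/media_stream_dependency_factory.h"
    #include "third_party/WebKit/public/platform/WebMediaConstraints.h"
    #include "third_party/WebKit/public/platform/WebMediaStream.h"
    #include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
    #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"

    namespace content {

    bool SetUpLocalAudioSources(MediaStreamDependencyFactory* factory,
                                int render_view_id,
                                const blink::WebMediaConstraints& audio_constraints,
                                blink::WebMediaStream* web_stream) {
      blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
      web_stream->audioTracks(audio_tracks);
      for (size_t i = 0; i < audio_tracks.size(); ++i) {
        blink::WebMediaStreamSource source = audio_tracks[i].source();
        MediaStreamAudioSource* source_data =
            static_cast<MediaStreamAudioSource*>(source.extraData());
        if (!source_data)
          continue;  // E.g. a WebAudio source; handled when the track is made.
        // Failure now comes back synchronously instead of through the old
        // MediaSourcesCreatedCallback fired by SourceStateObserver.
        if (!factory->InitializeMediaStreamAudioSource(
                render_view_id, audio_constraints, source_data)) {
          return false;
        }
      }
      // Video is analogous but handled outside this file: CreateVideoCapturer()
      // returns a cricket::VideoCapturer that CreateVideoSource() takes
      // ownership of, and the resulting adapter is what GetAdapter() later
      // hands to CreateNativeVideoMediaStreamTrack().
      return true;
    }

    }  // namespace content
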