OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/media_stream_dependency_factory.h" | 5 #include "content/renderer/media/media_stream_dependency_factory.h" |
6 | 6 |
7 #include <vector> | 7 #include <vector> |
8 | 8 |
9 #include "base/synchronization/waitable_event.h" | 9 #include "base/synchronization/waitable_event.h" |
10 #include "base/utf_string_conversions.h" | 10 #include "base/utf_string_conversions.h" |
11 #include "content/renderer/media/media_stream_extra_data.h" | 11 #include "content/renderer/media/media_stream_extra_data.h" |
12 #include "content/renderer/media/media_stream_source_extra_data.h" | 12 #include "content/renderer/media/media_stream_source_extra_data.h" |
| 13 #include "content/renderer/media/peer_connection_handler_jsep.h" |
| 14 #include "content/renderer/media/rtc_peer_connection_handler.h" |
13 #include "content/renderer/media/rtc_video_capturer.h" | 15 #include "content/renderer/media/rtc_video_capturer.h" |
14 #include "content/renderer/media/peer_connection_handler_jsep.h" | |
15 #include "content/renderer/media/video_capture_impl_manager.h" | 16 #include "content/renderer/media/video_capture_impl_manager.h" |
16 #include "content/renderer/media/webrtc_audio_device_impl.h" | 17 #include "content/renderer/media/webrtc_audio_device_impl.h" |
17 #include "content/renderer/media/webrtc_uma_histograms.h" | 18 #include "content/renderer/media/webrtc_uma_histograms.h" |
18 #include "content/renderer/p2p/ipc_network_manager.h" | 19 #include "content/renderer/p2p/ipc_network_manager.h" |
19 #include "content/renderer/p2p/ipc_socket_factory.h" | 20 #include "content/renderer/p2p/ipc_socket_factory.h" |
20 #include "content/renderer/p2p/port_allocator.h" | 21 #include "content/renderer/p2p/port_allocator.h" |
21 #include "jingle/glue/thread_wrapper.h" | 22 #include "jingle/glue/thread_wrapper.h" |
22 #include "third_party/WebKit/Source/WebKit/chromium/public/WebFrame.h" | 23 #include "third_party/WebKit/Source/WebKit/chromium/public/WebFrame.h" |
23 #include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebMediaStre
amComponent.h" | 24 #include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebMediaStre
amComponent.h" |
24 #include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebMediaStre
amDescriptor.h" | 25 #include "third_party/WebKit/Source/WebKit/chromium/public/platform/WebMediaStre
amDescriptor.h" |
(...skipping 64 matching lines...) |
89 } | 90 } |
90 | 91 |
91 WebKit::WebPeerConnection00Handler* | 92 WebKit::WebPeerConnection00Handler* |
92 MediaStreamDependencyFactory::CreatePeerConnectionHandlerJsep( | 93 MediaStreamDependencyFactory::CreatePeerConnectionHandlerJsep( |
93 WebKit::WebPeerConnection00HandlerClient* client) { | 94 WebKit::WebPeerConnection00HandlerClient* client) { |
94 // Save histogram data so we can see how much PeerConnection is used. | 95 // Save histogram data so we can see how much PeerConnection is used. |
95 // The histogram counts the number of calls to the JS API | 96 // The histogram counts the number of calls to the JS API |
96 // webKitPeerConnection00. | 97 // webKitPeerConnection00. |
97 UpdateWebRTCMethodCount(WEBKIT_PEER_CONNECTION); | 98 UpdateWebRTCMethodCount(WEBKIT_PEER_CONNECTION); |
98 | 99 |
99 if (!EnsurePeerConnectionFactory()) { | 100 if (!EnsurePeerConnectionFactory()) |
100 return NULL; | 101 return NULL; |
101 } | |
102 | 102 |
103 return new PeerConnectionHandlerJsep(client, this); | 103 return new PeerConnectionHandlerJsep(client, this); |
104 } | 104 } |
105 | 105 |
| 106 WebKit::WebRTCPeerConnectionHandler* |
| 107 MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler( |
| 108 WebKit::WebRTCPeerConnectionHandlerClient* client) { |
| 109 // Save histogram data so we can see how much PeerConnection is used. |
| 110 // The histogram counts the number of calls to the JS API |
| 111 // webKitRTCPeerConnection. |
| 112 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); |
| 113 |
| 114 if (!EnsurePeerConnectionFactory()) |
| 115 return NULL; |
| 116 |
| 117 return new RTCPeerConnectionHandler(client, this); |
| 118 } |
| 119 |
106 bool MediaStreamDependencyFactory::CreateNativeLocalMediaStream( | 120 bool MediaStreamDependencyFactory::CreateNativeLocalMediaStream( |
107 WebKit::WebMediaStreamDescriptor* description) { | 121 WebKit::WebMediaStreamDescriptor* description) { |
108 // Creating the peer connection factory can fail if for example the audio | 122 // Creating the peer connection factory can fail if for example the audio |
109 // (input or output) or video device cannot be opened. Handling such cases | 123 // (input or output) or video device cannot be opened. Handling such cases |
110 // better is a higher level design discussion which involves the media | 124 // better is a higher level design discussion which involves the media |
111 // manager, webrtc and libjingle. We cannot create any native | 125 // manager, webrtc and libjingle. We cannot create any native |
112 // track objects however, so we'll just have to skip that. Furthermore, | 126 // track objects however, so we'll just have to skip that. Furthermore, |
113 // creating a peer connection later on will fail if we don't have a factory. | 127 // creating a peer connection later on will fail if we don't have a factory. |
114 if (!EnsurePeerConnectionFactory()) | 128 if (!EnsurePeerConnectionFactory()) |
115 return false; | 129 return false; |
116 | 130 |
117 std::string label = UTF16ToUTF8(description->label()); | 131 std::string label = UTF16ToUTF8(description->label()); |
118 talk_base::scoped_refptr<webrtc::LocalMediaStreamInterface> native_stream = | 132 scoped_refptr<webrtc::LocalMediaStreamInterface> native_stream = |
119 CreateLocalMediaStream(label); | 133 CreateLocalMediaStream(label); |
120 | 134 |
121 // Add audio tracks. | 135 // Add audio tracks. |
122 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components; | 136 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components; |
123 description->audioSources(audio_components); | 137 description->audioSources(audio_components); |
124 for (size_t i = 0; i < audio_components.size(); ++i) { | 138 for (size_t i = 0; i < audio_components.size(); ++i) { |
125 const WebKit::WebMediaStreamSource& source = audio_components[i].source(); | 139 const WebKit::WebMediaStreamSource& source = audio_components[i].source(); |
126 MediaStreamSourceExtraData* source_data = | 140 MediaStreamSourceExtraData* source_data = |
127 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 141 static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
128 if (!source_data) { | 142 if (!source_data) { |
129 // TODO(perkj): Implement support for sources from remote MediaStreams. | 143 // TODO(perkj): Implement support for sources from remote MediaStreams. |
130 NOTIMPLEMENTED(); | 144 NOTIMPLEMENTED(); |
131 continue; | 145 continue; |
132 } | 146 } |
133 // TODO(perkj): Refactor the creation of audio tracks to use a proper | 147 // TODO(perkj): Refactor the creation of audio tracks to use a proper |
134 // interface for receiving audio input data. Currently NULL is passed since | 148 // interface for receiving audio input data. Currently NULL is passed since |
135 // the |audio_device| is the wrong class and is unused. | 149 // the |audio_device| is the wrong class and is unused. |
136 talk_base::scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track( | 150 scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track( |
137 CreateLocalAudioTrack(UTF16ToUTF8(source.id()), NULL)); | 151 CreateLocalAudioTrack(UTF16ToUTF8(source.id()), NULL)); |
138 native_stream->AddTrack(audio_track); | 152 native_stream->AddTrack(audio_track); |
139 audio_track->set_enabled(audio_components[i].isEnabled()); | 153 audio_track->set_enabled(audio_components[i].isEnabled()); |
140 // TODO(xians): This sets the source of all audio tracks to the same | 154 // TODO(xians): This sets the source of all audio tracks to the same |
141 // microphone. Implement support for setting the source per audio track | 155 // microphone. Implement support for setting the source per audio track |
142 // instead. | 156 // instead. |
143 SetAudioDeviceSessionId(source_data->device_info().session_id); | 157 SetAudioDeviceSessionId(source_data->device_info().session_id); |
144 } | 158 } |
145 | 159 |
146 // Add video tracks. | 160 // Add video tracks. |
147 WebKit::WebVector<WebKit::WebMediaStreamComponent> video_components; | 161 WebKit::WebVector<WebKit::WebMediaStreamComponent> video_components; |
148 description->videoSources(video_components); | 162 description->videoSources(video_components); |
149 for (size_t i = 0; i < video_components.size(); ++i) { | 163 for (size_t i = 0; i < video_components.size(); ++i) { |
150 const WebKit::WebMediaStreamSource& source = video_components[i].source(); | 164 const WebKit::WebMediaStreamSource& source = video_components[i].source(); |
151 MediaStreamSourceExtraData* source_data = | 165 MediaStreamSourceExtraData* source_data = |
152 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 166 static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
153 if (!source_data) { | 167 if (!source_data) { |
154 // TODO(perkj): Implement support for sources from remote MediaStreams. | 168 // TODO(perkj): Implement support for sources from remote MediaStreams. |
155 NOTIMPLEMENTED(); | 169 NOTIMPLEMENTED(); |
156 continue; | 170 continue; |
157 } | 171 } |
158 talk_base::scoped_refptr<webrtc::LocalVideoTrackInterface> video_track( | 172 scoped_refptr<webrtc::LocalVideoTrackInterface> video_track( |
159 CreateLocalVideoTrack(UTF16ToUTF8(source.id()), | 173 CreateLocalVideoTrack(UTF16ToUTF8(source.id()), |
160 source_data->device_info().session_id)); | 174 source_data->device_info().session_id)); |
161 native_stream->AddTrack(video_track); | 175 native_stream->AddTrack(video_track); |
162 video_track->set_enabled(video_components[i].isEnabled()); | 176 video_track->set_enabled(video_components[i].isEnabled()); |
163 } | 177 } |
164 | 178 |
165 description->setExtraData(new MediaStreamExtraData(native_stream)); | 179 description->setExtraData(new MediaStreamExtraData(native_stream)); |
166 return true; | 180 return true; |
167 } | 181 } |
168 | 182 |
169 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory( | 183 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory( |
170 talk_base::Thread* worker_thread, | 184 talk_base::Thread* worker_thread, |
171 talk_base::Thread* signaling_thread, | 185 talk_base::Thread* signaling_thread, |
172 content::P2PSocketDispatcher* socket_dispatcher, | 186 content::P2PSocketDispatcher* socket_dispatcher, |
173 talk_base::NetworkManager* network_manager, | 187 talk_base::NetworkManager* network_manager, |
174 talk_base::PacketSocketFactory* socket_factory) { | 188 talk_base::PacketSocketFactory* socket_factory) { |
175 if (!pc_factory_.get()) { | 189 if (!pc_factory_.get()) { |
176 talk_base::scoped_refptr<P2PPortAllocatorFactory> pa_factory = | 190 scoped_refptr<P2PPortAllocatorFactory> pa_factory = |
177 new talk_base::RefCountedObject<P2PPortAllocatorFactory>( | 191 new talk_base::RefCountedObject<P2PPortAllocatorFactory>( |
178 socket_dispatcher, | 192 socket_dispatcher, |
179 network_manager, | 193 network_manager, |
180 socket_factory); | 194 socket_factory); |
181 | 195 |
182 DCHECK(!audio_device_); | 196 DCHECK(!audio_device_); |
183 audio_device_ = new WebRtcAudioDeviceImpl(); | 197 audio_device_ = new WebRtcAudioDeviceImpl(); |
184 talk_base::scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory( | 198 scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory( |
185 webrtc::CreatePeerConnectionFactory(worker_thread, | 199 webrtc::CreatePeerConnectionFactory(worker_thread, |
186 signaling_thread, | 200 signaling_thread, |
187 pa_factory.release(), | 201 pa_factory.release(), |
188 audio_device_)); | 202 audio_device_)); |
189 if (factory.get()) | 203 if (factory.get()) |
190 pc_factory_ = factory.release(); | 204 pc_factory_ = factory.release(); |
191 } | 205 } |
192 return pc_factory_.get() != NULL; | 206 return pc_factory_.get() != NULL; |
193 } | 207 } |
194 | 208 |
195 bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() { | 209 bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() { |
196 return pc_factory_.get() != NULL; | 210 return pc_factory_.get() != NULL; |
197 } | 211 } |
198 | 212 |
199 talk_base::scoped_refptr<webrtc::PeerConnectionInterface> | 213 scoped_refptr<webrtc::PeerConnectionInterface> |
200 MediaStreamDependencyFactory::CreatePeerConnection( | 214 MediaStreamDependencyFactory::CreatePeerConnection( |
201 const std::string& config, | 215 const std::string& config, |
202 webrtc::PeerConnectionObserver* observer) { | 216 webrtc::PeerConnectionObserver* observer) { |
203 return pc_factory_->CreatePeerConnection(config, observer); | 217 return pc_factory_->CreatePeerConnection(config, observer).get(); |
204 } | 218 } |
205 | 219 |
206 talk_base::scoped_refptr<webrtc::LocalMediaStreamInterface> | 220 scoped_refptr<webrtc::PeerConnectionInterface> |
| 221 MediaStreamDependencyFactory::CreatePeerConnection( |
| 222 const webrtc::JsepInterface::IceServers& ice_servers, |
| 223 const webrtc::MediaConstraintsInterface* constraints, |
| 224 webrtc::PeerConnectionObserver* observer) { |
| 225 return pc_factory_->CreatePeerConnection( |
| 226 ice_servers, constraints, observer).get(); |
| 227 } |
| 228 |
| 229 scoped_refptr<webrtc::LocalMediaStreamInterface> |
207 MediaStreamDependencyFactory::CreateLocalMediaStream( | 230 MediaStreamDependencyFactory::CreateLocalMediaStream( |
208 const std::string& label) { | 231 const std::string& label) { |
209 return pc_factory_->CreateLocalMediaStream(label); | 232 return pc_factory_->CreateLocalMediaStream(label).get(); |
210 } | 233 } |
211 | 234 |
212 talk_base::scoped_refptr<webrtc::LocalVideoTrackInterface> | 235 scoped_refptr<webrtc::LocalVideoTrackInterface> |
213 MediaStreamDependencyFactory::CreateLocalVideoTrack( | 236 MediaStreamDependencyFactory::CreateLocalVideoTrack( |
214 const std::string& label, | 237 const std::string& label, |
215 int video_session_id) { | 238 int video_session_id) { |
216 RtcVideoCapturer* capturer = new RtcVideoCapturer(video_session_id, | 239 RtcVideoCapturer* capturer = new RtcVideoCapturer(video_session_id, |
217 vc_manager_.get()); | 240 vc_manager_.get()); |
218 | 241 |
219 // The video track takes ownership of |capturer|. | 242 // The video track takes ownership of |capturer|. |
220 return pc_factory_->CreateLocalVideoTrack(label, | 243 return pc_factory_->CreateLocalVideoTrack(label, capturer).get(); |
221 capturer); | |
222 } | 244 } |
223 | 245 |
224 talk_base::scoped_refptr<webrtc::LocalAudioTrackInterface> | 246 scoped_refptr<webrtc::LocalAudioTrackInterface> |
225 MediaStreamDependencyFactory::CreateLocalAudioTrack( | 247 MediaStreamDependencyFactory::CreateLocalAudioTrack( |
226 const std::string& label, | 248 const std::string& label, |
227 webrtc::AudioDeviceModule* audio_device) { | 249 webrtc::AudioDeviceModule* audio_device) { |
228 return pc_factory_->CreateLocalAudioTrack(label, audio_device); | 250 return pc_factory_->CreateLocalAudioTrack(label, audio_device).get(); |
229 } | 251 } |
230 | 252 |
231 webrtc::SessionDescriptionInterface* | 253 webrtc::SessionDescriptionInterface* |
232 MediaStreamDependencyFactory::CreateSessionDescription(const std::string& sdp) { | 254 MediaStreamDependencyFactory::CreateSessionDescription(const std::string& sdp) { |
233 return webrtc::CreateSessionDescription(sdp); | 255 return webrtc::CreateSessionDescription(sdp); |
234 } | 256 } |
235 | 257 |
| 258 webrtc::SessionDescriptionInterface* |
| 259 MediaStreamDependencyFactory::CreateSessionDescription(const std::string& type, |
| 260 const std::string& sdp) { |
| 261 return webrtc::CreateSessionDescription(type, sdp); |
| 262 } |
| 263 |
236 webrtc::IceCandidateInterface* MediaStreamDependencyFactory::CreateIceCandidate( | 264 webrtc::IceCandidateInterface* MediaStreamDependencyFactory::CreateIceCandidate( |
237 const std::string& sdp_mid, | 265 const std::string& sdp_mid, |
238 int sdp_mline_index, | 266 int sdp_mline_index, |
239 const std::string& sdp) { | 267 const std::string& sdp) { |
240 return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp); | 268 return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp); |
241 } | 269 } |
242 | 270 |
243 void MediaStreamDependencyFactory::SetAudioDeviceSessionId(int session_id) { | 271 void MediaStreamDependencyFactory::SetAudioDeviceSessionId(int session_id) { |
244 audio_device_->SetSessionId(session_id); | 272 audio_device_->SetSessionId(session_id); |
245 } | 273 } |
(...skipping 87 matching lines...) |
333 base::Unretained(this))); | 361 base::Unretained(this))); |
334 // Stopping the thread will wait until all tasks have been | 362 // Stopping the thread will wait until all tasks have been |
335 // processed before returning. We wait for the above task to finish before | 363 // processed before returning. We wait for the above task to finish before |
336 // letting the function continue to avoid any potential race issues. | 364 // letting the function continue to avoid any potential race issues. |
337 chrome_worker_thread_.Stop(); | 365 chrome_worker_thread_.Stop(); |
338 } else { | 366 } else { |
339 NOTREACHED() << "Worker thread not running."; | 367 NOTREACHED() << "Worker thread not running."; |
340 } | 368 } |
341 } | 369 } |
342 } | 370 } |
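
For context, a minimal hypothetical caller sketch (not part of this patch): |client| is assumed to be supplied by the WebKit bindings and |factory| to be the renderer's MediaStreamDependencyFactory instance; only the CreateRTCPeerConnectionHandler() signature added above is taken from the change.

  // Hypothetical usage; |client| and |factory| are assumptions, not code
  // from this CL.
  WebKit::WebRTCPeerConnectionHandler* handler =
      factory->CreateRTCPeerConnectionHandler(client);
  if (!handler) {
    // EnsurePeerConnectionFactory() failed, e.g. because no audio or video
    // device could be opened; callers must handle a NULL handler.
  }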