Index: content/renderer/media/webrtc_audio_device_unittest.cc |
diff --git a/content/renderer/media/webrtc_audio_device_unittest.cc b/content/renderer/media/webrtc_audio_device_unittest.cc |
index 7074c95bb09c3e28966169cb62e91300918c89ec..ac41e4f4b00d9e88eb39d179ba0126a14df84a1a 100644 |
--- a/content/renderer/media/webrtc_audio_device_unittest.cc |
+++ b/content/renderer/media/webrtc_audio_device_unittest.cc |
@@ -496,9 +496,14 @@ TEST_F(MAYBE_WebRTCAudioDeviceTest, Construct) { |
// be utilized to implement the actual audio path. The test registers a |
// webrtc::VoEExternalMedia implementation to hijack the output audio and |
// verify that streaming starts correctly. |
-// Disabled when running headless since the bots don't have the required config. |
-// Flaky, http://crbug.com/167299 . |
-TEST_F(MAYBE_WebRTCAudioDeviceTest, DISABLED_StartPlayout) { |
+// TODO(henrika): include on Android as well, as soon as all race conditions
+// in OpenSLES are resolved.
+#if defined(OS_ANDROID) |
+#define MAYBE_StartPlayout DISABLED_StartPlayout |
+#else |
+#define MAYBE_StartPlayout StartPlayout |
+#endif |
+TEST_F(MAYBE_WebRTCAudioDeviceTest, MAYBE_StartPlayout) { |
if (!has_output_devices_) { |
LOG(WARNING) << "No output device detected."; |
return; |
@@ -507,12 +512,13 @@ TEST_F(MAYBE_WebRTCAudioDeviceTest, DISABLED_StartPlayout) { |
scoped_ptr<media::AudioHardwareConfig> config = |
CreateRealHardwareConfig(audio_manager_.get()); |
SetAudioHardwareConfig(config.get()); |
+ media::AudioParameters params(config->GetOutputConfig()); |
if (!HardwareSampleRatesAreValid()) |
return; |
EXPECT_CALL(media_observer(), |
- OnSetAudioStreamStatus(_, 1, StrEq("created"))).Times(1); |
+ OnAudioStreamCreated(_, 1, params, StrEq(""))).Times(1); |
EXPECT_CALL(media_observer(), |
OnSetAudioStreamPlaying(_, 1, true)).Times(1); |
EXPECT_CALL(media_observer(), |
@@ -520,33 +526,30 @@ TEST_F(MAYBE_WebRTCAudioDeviceTest, DISABLED_StartPlayout) { |
EXPECT_CALL(media_observer(), |
OnDeleteAudioStream(_, 1)).Times(AnyNumber()); |
- scoped_refptr<WebRtcAudioRenderer> renderer = |
- new WebRtcAudioRenderer(kRenderViewId); |
- scoped_refptr<WebRtcAudioDeviceImpl> webrtc_audio_device( |
- new WebRtcAudioDeviceImpl()); |
- EXPECT_TRUE(webrtc_audio_device->SetAudioRenderer(renderer.get())); |
- |
WebRTCAutoDelete<webrtc::VoiceEngine> engine(webrtc::VoiceEngine::Create()); |
ASSERT_TRUE(engine.valid()); |
- |
ScopedWebRTCPtr<webrtc::VoEBase> base(engine.get()); |
ASSERT_TRUE(base.valid()); |
+ |
+ scoped_refptr<WebRtcAudioDeviceImpl> webrtc_audio_device( |
+ new WebRtcAudioDeviceImpl()); |
int err = base->Init(webrtc_audio_device.get()); |
ASSERT_EQ(0, err); |
- int ch = base->CreateChannel(); |
- EXPECT_NE(-1, ch); |
- |
ScopedWebRTCPtr<webrtc::VoEExternalMedia> external_media(engine.get()); |
ASSERT_TRUE(external_media.valid()); |
- |
base::WaitableEvent event(false, false); |
scoped_ptr<WebRTCMediaProcessImpl> media_process( |
new WebRTCMediaProcessImpl(&event)); |
+ int ch = base->CreateChannel(); |
+ EXPECT_NE(-1, ch); |
EXPECT_EQ(0, external_media->RegisterExternalMediaProcessing( |
ch, webrtc::kPlaybackPerChannel, *media_process.get())); |
EXPECT_EQ(0, base->StartPlayout(ch)); |
+ scoped_refptr<WebRtcAudioRenderer> renderer = |
+ new WebRtcAudioRenderer(kRenderViewId); |
+ EXPECT_TRUE(webrtc_audio_device->SetAudioRenderer(renderer.get())); |
renderer->Play(); |
EXPECT_TRUE(event.TimedWait(TestTimeouts::action_timeout())); |
@@ -674,9 +677,14 @@ TEST_F(MAYBE_WebRTCAudioDeviceTest, MAYBE_StartRecording) { |
} |
// Uses WebRtcAudioDeviceImpl to play a local wave file. |
-// Disabled when running headless since the bots don't have the required config. |
-// Flaky, http://crbug.com/167298 . |
-TEST_F(MAYBE_WebRTCAudioDeviceTest, DISABLED_PlayLocalFile) { |
+// TODO(henrika): include on Android as well, as soon as all race conditions
+// in OpenSLES are resolved.
+#if defined(OS_ANDROID) |
+#define MAYBE_PlayLocalFile DISABLED_PlayLocalFile |
+#else |
+#define MAYBE_PlayLocalFile PlayLocalFile |
+#endif |
+TEST_F(MAYBE_WebRTCAudioDeviceTest, MAYBE_PlayLocalFile) { |
if (!has_output_devices_) { |
LOG(WARNING) << "No output device detected."; |
return; |
@@ -688,12 +696,13 @@ TEST_F(MAYBE_WebRTCAudioDeviceTest, DISABLED_PlayLocalFile) { |
scoped_ptr<media::AudioHardwareConfig> config = |
CreateRealHardwareConfig(audio_manager_.get()); |
SetAudioHardwareConfig(config.get()); |
+ media::AudioParameters params(config->GetOutputConfig()); |
if (!HardwareSampleRatesAreValid()) |
return; |
EXPECT_CALL(media_observer(), |
- OnSetAudioStreamStatus(_, 1, StrEq("created"))).Times(1); |
+ OnAudioStreamCreated(_, 1, params, StrEq(""))).Times(1); |
EXPECT_CALL(media_observer(), |
OnSetAudioStreamPlaying(_, 1, true)).Times(1); |
EXPECT_CALL(media_observer(), |
@@ -701,23 +710,21 @@ TEST_F(MAYBE_WebRTCAudioDeviceTest, DISABLED_PlayLocalFile) { |
EXPECT_CALL(media_observer(), |
OnDeleteAudioStream(_, 1)).Times(AnyNumber()); |
- scoped_refptr<WebRtcAudioRenderer> renderer = |
- new WebRtcAudioRenderer(kRenderViewId); |
- scoped_refptr<WebRtcAudioDeviceImpl> webrtc_audio_device( |
- new WebRtcAudioDeviceImpl()); |
- EXPECT_TRUE(webrtc_audio_device->SetAudioRenderer(renderer.get())); |
- |
WebRTCAutoDelete<webrtc::VoiceEngine> engine(webrtc::VoiceEngine::Create()); |
ASSERT_TRUE(engine.valid()); |
- |
ScopedWebRTCPtr<webrtc::VoEBase> base(engine.get()); |
ASSERT_TRUE(base.valid()); |
+ |
+ scoped_refptr<WebRtcAudioDeviceImpl> webrtc_audio_device( |
+ new WebRtcAudioDeviceImpl()); |
int err = base->Init(webrtc_audio_device.get()); |
ASSERT_EQ(0, err); |
- |
int ch = base->CreateChannel(); |
EXPECT_NE(-1, ch); |
EXPECT_EQ(0, base->StartPlayout(ch)); |
+ scoped_refptr<WebRtcAudioRenderer> renderer = |
+ new WebRtcAudioRenderer(kRenderViewId); |
+ EXPECT_TRUE(webrtc_audio_device->SetAudioRenderer(renderer.get())); |
renderer->Play(); |
ScopedWebRTCPtr<webrtc::VoEFile> file(engine.get()); |
@@ -733,7 +740,7 @@ TEST_F(MAYBE_WebRTCAudioDeviceTest, DISABLED_PlayLocalFile) { |
// Play 2 seconds worth of audio and then quit. |
message_loop_.PostDelayedTask(FROM_HERE, |
base::MessageLoop::QuitClosure(), |
- base::TimeDelta::FromSeconds(6)); |
+ base::TimeDelta::FromSeconds(2)); |
message_loop_.Run(); |
renderer->Stop(); |
@@ -767,28 +774,27 @@ TEST_F(MAYBE_WebRTCAudioDeviceTest, MAYBE_FullDuplexAudioWithAGC) { |
scoped_ptr<media::AudioHardwareConfig> config = |
CreateRealHardwareConfig(audio_manager_.get()); |
SetAudioHardwareConfig(config.get()); |
+ media::AudioParameters params(config->GetOutputConfig()); |
if (!HardwareSampleRatesAreValid()) |
return; |
EXPECT_CALL(media_observer(), |
+ OnAudioStreamCreated(_, 1, params, StrEq(""))).Times(1); |
+ EXPECT_CALL(media_observer(), |
OnSetAudioStreamPlaying(_, 1, true)); |
EXPECT_CALL(media_observer(), |
OnSetAudioStreamStatus(_, 1, StrEq("closed"))); |
EXPECT_CALL(media_observer(), |
OnDeleteAudioStream(_, 1)).Times(AnyNumber()); |
- scoped_refptr<WebRtcAudioRenderer> renderer = |
- new WebRtcAudioRenderer(kRenderViewId); |
- scoped_refptr<WebRtcAudioDeviceImpl> webrtc_audio_device( |
- new WebRtcAudioDeviceImpl()); |
- EXPECT_TRUE(webrtc_audio_device->SetAudioRenderer(renderer.get())); |
- |
WebRTCAutoDelete<webrtc::VoiceEngine> engine(webrtc::VoiceEngine::Create()); |
ASSERT_TRUE(engine.valid()); |
- |
ScopedWebRTCPtr<webrtc::VoEBase> base(engine.get()); |
ASSERT_TRUE(base.valid()); |
+ |
+ scoped_refptr<WebRtcAudioDeviceImpl> webrtc_audio_device( |
+ new WebRtcAudioDeviceImpl()); |
int err = base->Init(webrtc_audio_device.get()); |
ASSERT_EQ(0, err); |
@@ -826,6 +832,9 @@ TEST_F(MAYBE_WebRTCAudioDeviceTest, MAYBE_FullDuplexAudioWithAGC) { |
EXPECT_EQ(0, network->RegisterExternalTransport(ch, *transport.get())); |
EXPECT_EQ(0, base->StartPlayout(ch)); |
EXPECT_EQ(0, base->StartSend(ch)); |
+ scoped_refptr<WebRtcAudioRenderer> renderer = |
+ new WebRtcAudioRenderer(kRenderViewId); |
+ EXPECT_TRUE(webrtc_audio_device->SetAudioRenderer(renderer.get())); |
renderer->Play(); |
LOG(INFO) << ">> You should now be able to hear yourself in loopback..."; |
@@ -900,7 +909,15 @@ TEST_F(MAYBE_WebRTCAudioDeviceTest, DISABLED_WebRtcRecordingSetupTime) { |
EXPECT_EQ(0, base->Terminate()); |
} |
-TEST_F(MAYBE_WebRTCAudioDeviceTest, WebRtcPlayoutSetupTime) { |
+ |
+// TODO(henrika): include on Android as well, as soon as all race conditions
+// in OpenSLES are resolved.
+#if defined(OS_ANDROID) |
+#define MAYBE_WebRtcPlayoutSetupTime DISABLED_WebRtcPlayoutSetupTime |
+#else |
+#define MAYBE_WebRtcPlayoutSetupTime WebRtcPlayoutSetupTime |
+#endif |
+TEST_F(MAYBE_WebRTCAudioDeviceTest, MAYBE_WebRtcPlayoutSetupTime) { |
if (!has_output_devices_) { |
LOG(WARNING) << "No output device detected."; |
return; |
@@ -909,11 +926,14 @@ TEST_F(MAYBE_WebRTCAudioDeviceTest, WebRtcPlayoutSetupTime) { |
scoped_ptr<media::AudioHardwareConfig> config = |
CreateRealHardwareConfig(audio_manager_.get()); |
SetAudioHardwareConfig(config.get()); |
+ media::AudioParameters params(config->GetOutputConfig()); |
if (!HardwareSampleRatesAreValid()) |
return; |
EXPECT_CALL(media_observer(), |
+ OnAudioStreamCreated(_, 1, params, StrEq(""))).Times(1); |
+ EXPECT_CALL(media_observer(), |
OnSetAudioStreamStatus(_, 1, _)).Times(AnyNumber()); |
EXPECT_CALL(media_observer(), |
OnSetAudioStreamPlaying(_, 1, true)); |