OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "net/proxy/proxy_script_fetcher_impl.h" | 5 #include "net/proxy/proxy_script_fetcher_impl.h" |
6 | 6 |
7 #include <string> | 7 #include <string> |
8 | 8 |
9 #include "base/file_path.h" | 9 #include "base/file_path.h" |
10 #include "base/compiler_specific.h" | 10 #include "base/compiler_specific.h" |
(...skipping 77 matching lines...) |
88 new HttpNetworkSession(params)); | 88 new HttpNetworkSession(params)); |
89 storage_.set_http_transaction_factory(new HttpCache( | 89 storage_.set_http_transaction_factory(new HttpCache( |
90 network_session, | 90 network_session, |
91 HttpCache::DefaultBackend::InMemory(0))); | 91 HttpCache::DefaultBackend::InMemory(0))); |
92 url_request_job_factory_.reset(new URLRequestJobFactory); | 92 url_request_job_factory_.reset(new URLRequestJobFactory); |
93 set_job_factory(url_request_job_factory_.get()); | 93 set_job_factory(url_request_job_factory_.get()); |
94 url_request_job_factory_->AddInterceptor( | 94 url_request_job_factory_->AddInterceptor( |
95 new CheckNoRevocationFlagSetInterceptor); | 95 new CheckNoRevocationFlagSetInterceptor); |
96 } | 96 } |
97 | 97 |
98 private: | 98 virtual ~RequestContext() { |
99 ~RequestContext() { | |
100 } | 99 } |
101 | 100 |
| 101 private: |
102 URLRequestContextStorage storage_; | 102 URLRequestContextStorage storage_; |
103 scoped_ptr<URLRequestJobFactory> url_request_job_factory_; | 103 scoped_ptr<URLRequestJobFactory> url_request_job_factory_; |
104 }; | 104 }; |
105 | 105 |
106 // Get a file:// url relative to net/data/proxy/proxy_script_fetcher_unittest. | 106 // Get a file:// url relative to net/data/proxy/proxy_script_fetcher_unittest. |
107 GURL GetTestFileUrl(const std::string& relpath) { | 107 GURL GetTestFileUrl(const std::string& relpath) { |
108 FilePath path; | 108 FilePath path; |
109 PathService::Get(base::DIR_SOURCE_ROOT, &path); | 109 PathService::Get(base::DIR_SOURCE_ROOT, &path); |
110 path = path.AppendASCII("net"); | 110 path = path.AppendASCII("net"); |
111 path = path.AppendASCII("data"); | 111 path = path.AppendASCII("data"); |
(...skipping 14 matching lines...) |
126 | 126 |
127 static void SetUpTestCase() { | 127 static void SetUpTestCase() { |
128 URLRequest::AllowFileAccess(); | 128 URLRequest::AllowFileAccess(); |
129 } | 129 } |
130 | 130 |
131 protected: | 131 protected: |
132 TestServer test_server_; | 132 TestServer test_server_; |
133 }; | 133 }; |
134 | 134 |
135 TEST_F(ProxyScriptFetcherImplTest, FileUrl) { | 135 TEST_F(ProxyScriptFetcherImplTest, FileUrl) { |
136 scoped_refptr<URLRequestContext> context(new RequestContext); | 136 RequestContext context; |
137 ProxyScriptFetcherImpl pac_fetcher(context); | 137 ProxyScriptFetcherImpl pac_fetcher(&context); |
138 | 138 |
139 { // Fetch a non-existent file. | 139 { // Fetch a non-existent file. |
140 string16 text; | 140 string16 text; |
141 TestCompletionCallback callback; | 141 TestCompletionCallback callback; |
142 int result = pac_fetcher.Fetch(GetTestFileUrl("does-not-exist"), | 142 int result = pac_fetcher.Fetch(GetTestFileUrl("does-not-exist"), |
143 &text, callback.callback()); | 143 &text, callback.callback()); |
144 EXPECT_EQ(ERR_IO_PENDING, result); | 144 EXPECT_EQ(ERR_IO_PENDING, result); |
145 EXPECT_EQ(ERR_FILE_NOT_FOUND, callback.WaitForResult()); | 145 EXPECT_EQ(ERR_FILE_NOT_FOUND, callback.WaitForResult()); |
146 EXPECT_TRUE(text.empty()); | 146 EXPECT_TRUE(text.empty()); |
147 } | 147 } |
148 { // Fetch a file that exists. | 148 { // Fetch a file that exists. |
149 string16 text; | 149 string16 text; |
150 TestCompletionCallback callback; | 150 TestCompletionCallback callback; |
151 int result = pac_fetcher.Fetch(GetTestFileUrl("pac.txt"), | 151 int result = pac_fetcher.Fetch(GetTestFileUrl("pac.txt"), |
152 &text, callback.callback()); | 152 &text, callback.callback()); |
153 EXPECT_EQ(ERR_IO_PENDING, result); | 153 EXPECT_EQ(ERR_IO_PENDING, result); |
154 EXPECT_EQ(OK, callback.WaitForResult()); | 154 EXPECT_EQ(OK, callback.WaitForResult()); |
155 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text); | 155 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text); |
156 } | 156 } |
157 } | 157 } |
158 | 158 |
159 // Note that all mime types are allowed for PAC files, to be consistent | 159 // Note that all mime types are allowed for PAC files, to be consistent |
160 // with other browsers. | 160 // with other browsers. |
161 TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) { | 161 TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) { |
162 ASSERT_TRUE(test_server_.Start()); | 162 ASSERT_TRUE(test_server_.Start()); |
163 | 163 |
164 scoped_refptr<URLRequestContext> context(new RequestContext); | 164 RequestContext context; |
165 ProxyScriptFetcherImpl pac_fetcher(context); | 165 ProxyScriptFetcherImpl pac_fetcher(&context); |
166 | 166 |
167 { // Fetch a PAC with mime type "text/plain" | 167 { // Fetch a PAC with mime type "text/plain" |
168 GURL url(test_server_.GetURL("files/pac.txt")); | 168 GURL url(test_server_.GetURL("files/pac.txt")); |
169 string16 text; | 169 string16 text; |
170 TestCompletionCallback callback; | 170 TestCompletionCallback callback; |
171 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 171 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
172 EXPECT_EQ(ERR_IO_PENDING, result); | 172 EXPECT_EQ(ERR_IO_PENDING, result); |
173 EXPECT_EQ(OK, callback.WaitForResult()); | 173 EXPECT_EQ(OK, callback.WaitForResult()); |
174 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text); | 174 EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text); |
175 } | 175 } |
(...skipping 13 matching lines...) |
189 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 189 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
190 EXPECT_EQ(ERR_IO_PENDING, result); | 190 EXPECT_EQ(ERR_IO_PENDING, result); |
191 EXPECT_EQ(OK, callback.WaitForResult()); | 191 EXPECT_EQ(OK, callback.WaitForResult()); |
192 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); | 192 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); |
193 } | 193 } |
194 } | 194 } |
195 | 195 |
196 TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) { | 196 TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) { |
197 ASSERT_TRUE(test_server_.Start()); | 197 ASSERT_TRUE(test_server_.Start()); |
198 | 198 |
199 scoped_refptr<URLRequestContext> context(new RequestContext); | 199 RequestContext context; |
200 ProxyScriptFetcherImpl pac_fetcher(context); | 200 ProxyScriptFetcherImpl pac_fetcher(&context); |
201 | 201 |
202 { // Fetch a PAC which gives a 500 -- FAIL | 202 { // Fetch a PAC which gives a 500 -- FAIL |
203 GURL url(test_server_.GetURL("files/500.pac")); | 203 GURL url(test_server_.GetURL("files/500.pac")); |
204 string16 text; | 204 string16 text; |
205 TestCompletionCallback callback; | 205 TestCompletionCallback callback; |
206 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 206 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
207 EXPECT_EQ(ERR_IO_PENDING, result); | 207 EXPECT_EQ(ERR_IO_PENDING, result); |
208 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult()); | 208 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult()); |
209 EXPECT_TRUE(text.empty()); | 209 EXPECT_TRUE(text.empty()); |
210 } | 210 } |
211 { // Fetch a PAC which gives a 404 -- FAIL | 211 { // Fetch a PAC which gives a 404 -- FAIL |
212 GURL url(test_server_.GetURL("files/404.pac")); | 212 GURL url(test_server_.GetURL("files/404.pac")); |
213 string16 text; | 213 string16 text; |
214 TestCompletionCallback callback; | 214 TestCompletionCallback callback; |
215 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 215 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
216 EXPECT_EQ(ERR_IO_PENDING, result); | 216 EXPECT_EQ(ERR_IO_PENDING, result); |
217 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult()); | 217 EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult()); |
218 EXPECT_TRUE(text.empty()); | 218 EXPECT_TRUE(text.empty()); |
219 } | 219 } |
220 } | 220 } |
221 | 221 |
222 TEST_F(ProxyScriptFetcherImplTest, ContentDisposition) { | 222 TEST_F(ProxyScriptFetcherImplTest, ContentDisposition) { |
223 ASSERT_TRUE(test_server_.Start()); | 223 ASSERT_TRUE(test_server_.Start()); |
224 | 224 |
225 scoped_refptr<URLRequestContext> context(new RequestContext); | 225 RequestContext context; |
226 ProxyScriptFetcherImpl pac_fetcher(context); | 226 ProxyScriptFetcherImpl pac_fetcher(&context); |
227 | 227 |
228 // Fetch PAC scripts via HTTP with a Content-Disposition header -- should | 228 // Fetch PAC scripts via HTTP with a Content-Disposition header -- should |
229 // have no effect. | 229 // have no effect. |
230 GURL url(test_server_.GetURL("files/downloadable.pac")); | 230 GURL url(test_server_.GetURL("files/downloadable.pac")); |
231 string16 text; | 231 string16 text; |
232 TestCompletionCallback callback; | 232 TestCompletionCallback callback; |
233 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 233 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
234 EXPECT_EQ(ERR_IO_PENDING, result); | 234 EXPECT_EQ(ERR_IO_PENDING, result); |
235 EXPECT_EQ(OK, callback.WaitForResult()); | 235 EXPECT_EQ(OK, callback.WaitForResult()); |
236 EXPECT_EQ(ASCIIToUTF16("-downloadable.pac-\n"), text); | 236 EXPECT_EQ(ASCIIToUTF16("-downloadable.pac-\n"), text); |
237 } | 237 } |
238 | 238 |
239 TEST_F(ProxyScriptFetcherImplTest, NoCache) { | 239 TEST_F(ProxyScriptFetcherImplTest, NoCache) { |
240 ASSERT_TRUE(test_server_.Start()); | 240 ASSERT_TRUE(test_server_.Start()); |
241 | 241 |
242 scoped_refptr<URLRequestContext> context(new RequestContext); | 242 RequestContext context; |
243 ProxyScriptFetcherImpl pac_fetcher(context); | 243 ProxyScriptFetcherImpl pac_fetcher(&context); |
244 | 244 |
245 // Fetch a PAC script whose HTTP headers make it cacheable for 1 hour. | 245 // Fetch a PAC script whose HTTP headers make it cacheable for 1 hour. |
246 GURL url(test_server_.GetURL("files/cacheable_1hr.pac")); | 246 GURL url(test_server_.GetURL("files/cacheable_1hr.pac")); |
247 { | 247 { |
248 string16 text; | 248 string16 text; |
249 TestCompletionCallback callback; | 249 TestCompletionCallback callback; |
250 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 250 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
251 EXPECT_EQ(ERR_IO_PENDING, result); | 251 EXPECT_EQ(ERR_IO_PENDING, result); |
252 EXPECT_EQ(OK, callback.WaitForResult()); | 252 EXPECT_EQ(OK, callback.WaitForResult()); |
253 EXPECT_EQ(ASCIIToUTF16("-cacheable_1hr.pac-\n"), text); | 253 EXPECT_EQ(ASCIIToUTF16("-cacheable_1hr.pac-\n"), text); |
(...skipping 10 matching lines...) |
264 TestCompletionCallback callback; | 264 TestCompletionCallback callback; |
265 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 265 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
266 EXPECT_EQ(ERR_IO_PENDING, result); | 266 EXPECT_EQ(ERR_IO_PENDING, result); |
267 EXPECT_EQ(ERR_CONNECTION_REFUSED, callback.WaitForResult()); | 267 EXPECT_EQ(ERR_CONNECTION_REFUSED, callback.WaitForResult()); |
268 } | 268 } |
269 } | 269 } |
270 | 270 |
271 TEST_F(ProxyScriptFetcherImplTest, TooLarge) { | 271 TEST_F(ProxyScriptFetcherImplTest, TooLarge) { |
272 ASSERT_TRUE(test_server_.Start()); | 272 ASSERT_TRUE(test_server_.Start()); |
273 | 273 |
274 scoped_refptr<URLRequestContext> context(new RequestContext); | 274 RequestContext context; |
275 ProxyScriptFetcherImpl pac_fetcher(context); | 275 ProxyScriptFetcherImpl pac_fetcher(&context); |
276 | 276 |
277 // Set the maximum response size to 50 bytes. | 277 // Set the maximum response size to 50 bytes. |
278 int prev_size = pac_fetcher.SetSizeConstraint(50); | 278 int prev_size = pac_fetcher.SetSizeConstraint(50); |
279 | 279 |
280 // These two URLs are the same file, but are http:// vs file:// | 280 // These two URLs are the same file, but are http:// vs file:// |
281 GURL urls[] = { | 281 GURL urls[] = { |
282 test_server_.GetURL("files/large-pac.nsproxy"), | 282 test_server_.GetURL("files/large-pac.nsproxy"), |
283 GetTestFileUrl("large-pac.nsproxy") | 283 GetTestFileUrl("large-pac.nsproxy") |
284 }; | 284 }; |
285 | 285 |
(...skipping 19 matching lines...) |
305 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 305 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
306 EXPECT_EQ(ERR_IO_PENDING, result); | 306 EXPECT_EQ(ERR_IO_PENDING, result); |
307 EXPECT_EQ(OK, callback.WaitForResult()); | 307 EXPECT_EQ(OK, callback.WaitForResult()); |
308 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); | 308 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); |
309 } | 309 } |
310 } | 310 } |
311 | 311 |
312 TEST_F(ProxyScriptFetcherImplTest, Hang) { | 312 TEST_F(ProxyScriptFetcherImplTest, Hang) { |
313 ASSERT_TRUE(test_server_.Start()); | 313 ASSERT_TRUE(test_server_.Start()); |
314 | 314 |
315 scoped_refptr<URLRequestContext> context(new RequestContext); | 315 RequestContext context; |
316 ProxyScriptFetcherImpl pac_fetcher(context); | 316 ProxyScriptFetcherImpl pac_fetcher(&context); |
317 | 317 |
318 // Set the timeout period to 0.5 seconds. | 318 // Set the timeout period to 0.5 seconds. |
319 base::TimeDelta prev_timeout = pac_fetcher.SetTimeoutConstraint( | 319 base::TimeDelta prev_timeout = pac_fetcher.SetTimeoutConstraint( |
320 base::TimeDelta::FromMilliseconds(500)); | 320 base::TimeDelta::FromMilliseconds(500)); |
321 | 321 |
322 // Try fetching a URL which takes 1.2 seconds. We should abort the request | 322 // Try fetching a URL which takes 1.2 seconds. We should abort the request |
323 // after 500 ms, and fail with a timeout error. | 323 // after 500 ms, and fail with a timeout error. |
324 { GURL url(test_server_.GetURL("slow/proxy.pac?1.2")); | 324 { GURL url(test_server_.GetURL("slow/proxy.pac?1.2")); |
325 string16 text; | 325 string16 text; |
326 TestCompletionCallback callback; | 326 TestCompletionCallback callback; |
(...skipping 16 matching lines...) |
343 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); | 343 EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text); |
344 } | 344 } |
345 } | 345 } |
346 | 346 |
347 // The ProxyScriptFetcher should decode any content-codings | 347 // The ProxyScriptFetcher should decode any content-codings |
348 // (like gzip, bzip, etc.), and apply any charset conversions to yield | 348 // (like gzip, bzip, etc.), and apply any charset conversions to yield |
349 // UTF8. | 349 // UTF8. |
350 TEST_F(ProxyScriptFetcherImplTest, Encodings) { | 350 TEST_F(ProxyScriptFetcherImplTest, Encodings) { |
351 ASSERT_TRUE(test_server_.Start()); | 351 ASSERT_TRUE(test_server_.Start()); |
352 | 352 |
353 scoped_refptr<URLRequestContext> context(new RequestContext); | 353 RequestContext context; |
354 ProxyScriptFetcherImpl pac_fetcher(context); | 354 ProxyScriptFetcherImpl pac_fetcher(&context); |
355 | 355 |
356 // Test a response that is gzip-encoded -- should get inflated. | 356 // Test a response that is gzip-encoded -- should get inflated. |
357 { | 357 { |
358 GURL url(test_server_.GetURL("files/gzipped_pac")); | 358 GURL url(test_server_.GetURL("files/gzipped_pac")); |
359 string16 text; | 359 string16 text; |
360 TestCompletionCallback callback; | 360 TestCompletionCallback callback; |
361 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 361 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
362 EXPECT_EQ(ERR_IO_PENDING, result); | 362 EXPECT_EQ(ERR_IO_PENDING, result); |
363 EXPECT_EQ(OK, callback.WaitForResult()); | 363 EXPECT_EQ(OK, callback.WaitForResult()); |
364 EXPECT_EQ(ASCIIToUTF16("This data was gzipped.\n"), text); | 364 EXPECT_EQ(ASCIIToUTF16("This data was gzipped.\n"), text); |
365 } | 365 } |
366 | 366 |
367 // Test a response that was served as UTF-16 (BE). It should | 367 // Test a response that was served as UTF-16 (BE). It should |
368 // be converted to UTF8. | 368 // be converted to UTF8. |
369 { | 369 { |
370 GURL url(test_server_.GetURL("files/utf16be_pac")); | 370 GURL url(test_server_.GetURL("files/utf16be_pac")); |
371 string16 text; | 371 string16 text; |
372 TestCompletionCallback callback; | 372 TestCompletionCallback callback; |
373 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 373 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
374 EXPECT_EQ(ERR_IO_PENDING, result); | 374 EXPECT_EQ(ERR_IO_PENDING, result); |
375 EXPECT_EQ(OK, callback.WaitForResult()); | 375 EXPECT_EQ(OK, callback.WaitForResult()); |
376 EXPECT_EQ(ASCIIToUTF16("This was encoded as UTF-16BE.\n"), text); | 376 EXPECT_EQ(ASCIIToUTF16("This was encoded as UTF-16BE.\n"), text); |
377 } | 377 } |
378 } | 378 } |
379 | 379 |
380 TEST_F(ProxyScriptFetcherImplTest, DataURLs) { | 380 TEST_F(ProxyScriptFetcherImplTest, DataURLs) { |
381 scoped_refptr<URLRequestContext> context(new RequestContext); | 381 RequestContext context; |
382 ProxyScriptFetcherImpl pac_fetcher(context); | 382 ProxyScriptFetcherImpl pac_fetcher(&context); |
383 | 383 |
384 const char kEncodedUrl[] = | 384 const char kEncodedUrl[] = |
385 "data:application/x-ns-proxy-autoconfig;base64,ZnVuY3Rpb24gRmluZFByb3h5R" | 385 "data:application/x-ns-proxy-autoconfig;base64,ZnVuY3Rpb24gRmluZFByb3h5R" |
386 "m9yVVJMKHVybCwgaG9zdCkgewogIGlmIChob3N0ID09ICdmb29iYXIuY29tJykKICAgIHJl" | 386 "m9yVVJMKHVybCwgaG9zdCkgewogIGlmIChob3N0ID09ICdmb29iYXIuY29tJykKICAgIHJl" |
387 "dHVybiAnUFJPWFkgYmxhY2tob2xlOjgwJzsKICByZXR1cm4gJ0RJUkVDVCc7Cn0="; | 387 "dHVybiAnUFJPWFkgYmxhY2tob2xlOjgwJzsKICByZXR1cm4gJ0RJUkVDVCc7Cn0="; |
388 const char kPacScript[] = | 388 const char kPacScript[] = |
389 "function FindProxyForURL(url, host) {\n" | 389 "function FindProxyForURL(url, host) {\n" |
390 " if (host == 'foobar.com')\n" | 390 " if (host == 'foobar.com')\n" |
391 " return 'PROXY blackhole:80';\n" | 391 " return 'PROXY blackhole:80';\n" |
392 " return 'DIRECT';\n" | 392 " return 'DIRECT';\n" |
(...skipping 16 matching lines...) |
409 { | 409 { |
410 GURL url(kEncodedUrlBroken); | 410 GURL url(kEncodedUrlBroken); |
411 string16 text; | 411 string16 text; |
412 TestCompletionCallback callback; | 412 TestCompletionCallback callback; |
413 int result = pac_fetcher.Fetch(url, &text, callback.callback()); | 413 int result = pac_fetcher.Fetch(url, &text, callback.callback()); |
414 EXPECT_EQ(ERR_FAILED, result); | 414 EXPECT_EQ(ERR_FAILED, result); |
415 } | 415 } |
416 } | 416 } |
417 | 417 |
418 } // namespace net | 418 } // namespace net |