| OLD | NEW | 
|---|---|
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #include "net/url_request/url_request_job.h" | 5 #include "net/url_request/url_request_job.h" | 
| 6 | 6 | 
| 7 #include "base/bind.h" | 7 #include "base/bind.h" | 
| 8 #include "base/compiler_specific.h" | 8 #include "base/compiler_specific.h" | 
| 9 #include "base/message_loop.h" | 9 #include "base/message_loop.h" | 
| 10 #include "base/power_monitor/power_monitor.h" | 10 #include "base/power_monitor/power_monitor.h" | 
| (...skipping 53 matching lines...) |
| 64 | 64 | 
| 65 // This function calls ReadData to get stream data. If a filter exists, passes | 65 // This function calls ReadData to get stream data. If a filter exists, passes | 
| 66 // the data to the attached filter. Then returns the output from filter back to | 66 // the data to the attached filter. Then returns the output from filter back to | 
| 67 // the caller. | 67 // the caller. | 
| 68 bool URLRequestJob::Read(IOBuffer* buf, int buf_size, int *bytes_read) { | 68 bool URLRequestJob::Read(IOBuffer* buf, int buf_size, int *bytes_read) { | 
| 69   bool rv = false; | 69   bool rv = false; | 
| 70 | 70 | 
| 71   DCHECK_LT(buf_size, 1000000);  // Sanity check. | 71   DCHECK_LT(buf_size, 1000000);  // Sanity check. | 
| 72   DCHECK(buf); | 72   DCHECK(buf); | 
| 73   DCHECK(bytes_read); | 73   DCHECK(bytes_read); | 
| 74   DCHECK(filtered_read_buffer_ == NULL); | 74   DCHECK(filtered_read_buffer_.get() == NULL); | 
| 75   DCHECK_EQ(0, filtered_read_buffer_len_); | 75   DCHECK_EQ(0, filtered_read_buffer_len_); | 
| 76 | 76 | 
| 77   *bytes_read = 0; | 77   *bytes_read = 0; | 
| 78 | 78 | 
| 79   // Skip Filter if not present. | 79   // Skip Filter if not present. | 
| 80   if (!filter_.get()) { | 80   if (!filter_.get()) { | 
| 81     rv = ReadRawDataHelper(buf, buf_size, bytes_read); | 81     rv = ReadRawDataHelper(buf, buf_size, bytes_read); | 
| 82   } else { | 82   } else { | 
| 83     // Save the caller's buffers while we do IO | 83     // Save the caller's buffers while we do IO | 
| 84     // in the filter's buffers. | 84     // in the filter's buffers. | 
| (...skipping 246 matching lines...) |
| 331       } else { | 331       } else { | 
| 332         FollowRedirect(new_location, http_status_code); | 332         FollowRedirect(new_location, http_status_code); | 
| 333       } | 333       } | 
| 334       return; | 334       return; | 
| 335     } | 335     } | 
| 336   } else if (NeedsAuth()) { | 336   } else if (NeedsAuth()) { | 
| 337     scoped_refptr<AuthChallengeInfo> auth_info; | 337     scoped_refptr<AuthChallengeInfo> auth_info; | 
| 338     GetAuthChallengeInfo(&auth_info); | 338     GetAuthChallengeInfo(&auth_info); | 
| 339     // Need to check for a NULL auth_info because the server may have failed | 339     // Need to check for a NULL auth_info because the server may have failed | 
| 340     // to send a challenge with the 401 response. | 340     // to send a challenge with the 401 response. | 
| 341     if (auth_info) { | 341     if (auth_info.get()) { | 
| 342       request_->NotifyAuthRequired(auth_info); | 342       request_->NotifyAuthRequired(auth_info.get()); | 
| 343       // Wait for SetAuth or CancelAuth to be called. | 343       // Wait for SetAuth or CancelAuth to be called. | 
| 344       return; | 344       return; | 
| 345     } | 345     } | 
| 346   } | 346   } | 
| 347 | 347 | 
| 348   has_handled_response_ = true; | 348   has_handled_response_ = true; | 
| 349   if (request_->status().is_success()) | 349   if (request_->status().is_success()) | 
| 350     filter_.reset(SetupFilter()); | 350     filter_.reset(SetupFilter()); | 
| 351 | 351 | 
| 352   if (!filter_.get()) { | 352   if (!filter_.get()) { | 
| (...skipping 145 matching lines...) |
| 498   // Do nothing. | 498   // Do nothing. | 
| 499 } | 499 } | 
| 500 | 500 | 
| 501 void URLRequestJob::FilteredDataRead(int bytes_read) { | 501 void URLRequestJob::FilteredDataRead(int bytes_read) { | 
| 502   DCHECK(filter_.get());  // don't add data if there is no filter | 502   DCHECK(filter_.get());  // don't add data if there is no filter | 
| 503   filter_->FlushStreamBuffer(bytes_read); | 503   filter_->FlushStreamBuffer(bytes_read); | 
| 504 } | 504 } | 
| 505 | 505 | 
| 506 bool URLRequestJob::ReadFilteredData(int* bytes_read) { | 506 bool URLRequestJob::ReadFilteredData(int* bytes_read) { | 
| 507   DCHECK(filter_.get());  // don't add data if there is no filter | 507   DCHECK(filter_.get());  // don't add data if there is no filter | 
| 508   DCHECK(filtered_read_buffer_ != NULL);  // we need to have a buffer to fill | 508   DCHECK(filtered_read_buffer_.get() != | 
|  | 509          NULL);                             // we need to have a buffer to fill | 
| 509   DCHECK_GT(filtered_read_buffer_len_, 0);  // sanity check | 510   DCHECK_GT(filtered_read_buffer_len_, 0);  // sanity check | 
| 510   DCHECK_LT(filtered_read_buffer_len_, 1000000);  // sanity check | 511   DCHECK_LT(filtered_read_buffer_len_, 1000000);  // sanity check | 
| 511   DCHECK(raw_read_buffer_ == NULL);  // there should be no raw read buffer yet | 512   DCHECK(raw_read_buffer_.get() == | 
|  | 513          NULL);  // there should be no raw read buffer yet | 
| 512 | 514 | 
| 513   bool rv = false; | 515   bool rv = false; | 
| 514   *bytes_read = 0; | 516   *bytes_read = 0; | 
| 515 | 517 | 
| 516   if (is_done()) | 518   if (is_done()) | 
| 517     return true; | 519     return true; | 
| 518 | 520 | 
| 519   if (!filter_needs_more_output_space_ && !filter_->stream_data_len()) { | 521   if (!filter_needs_more_output_space_ && !filter_->stream_data_len()) { | 
| 520     // We don't have any raw data to work with, so | 522     // We don't have any raw data to work with, so | 
| 521     // read from the socket. | 523     // read from the socket. | 
| (...skipping 135 matching lines...) |
| 657     IOBuffer* stream_buffer = filter_->stream_buffer(); | 659     IOBuffer* stream_buffer = filter_->stream_buffer(); | 
| 658     int stream_buffer_size = filter_->stream_buffer_size(); | 660     int stream_buffer_size = filter_->stream_buffer_size(); | 
| 659     rv = ReadRawDataHelper(stream_buffer, stream_buffer_size, bytes_read); | 661     rv = ReadRawDataHelper(stream_buffer, stream_buffer_size, bytes_read); | 
| 660   } | 662   } | 
| 661   return rv; | 663   return rv; | 
| 662 } | 664 } | 
| 663 | 665 | 
| 664 bool URLRequestJob::ReadRawDataHelper(IOBuffer* buf, int buf_size, | 666 bool URLRequestJob::ReadRawDataHelper(IOBuffer* buf, int buf_size, | 
| 665                                       int* bytes_read) { | 667                                       int* bytes_read) { | 
| 666   DCHECK(!request_->status().is_io_pending()); | 668   DCHECK(!request_->status().is_io_pending()); | 
| 667   DCHECK(raw_read_buffer_ == NULL); | 669   DCHECK(raw_read_buffer_.get() == NULL); | 
| 668 | 670 | 
| 669   // Keep a pointer to the read buffer, so we have access to it in the | 671   // Keep a pointer to the read buffer, so we have access to it in the | 
| 670   // OnRawReadComplete() callback in the event that the read completes | 672   // OnRawReadComplete() callback in the event that the read completes | 
| 671   // asynchronously. | 673   // asynchronously. | 
| 672   raw_read_buffer_ = buf; | 674   raw_read_buffer_ = buf; | 
| 673   bool rv = ReadRawData(buf, buf_size, bytes_read); | 675   bool rv = ReadRawData(buf, buf_size, bytes_read); | 
| 674 | 676 | 
| 675   if (!request_->status().is_io_pending()) { | 677   if (!request_->status().is_io_pending()) { | 
| 676     // If |filter_| is NULL, and logging all bytes is enabled, log the raw | 678     // If |filter_| is NULL, and logging all bytes is enabled, log the raw | 
| 677     // bytes read. | 679     // bytes read. | 
| (...skipping 12 matching lines...) |
| 690   return rv; | 692   return rv; | 
| 691 } | 693 } | 
| 692 | 694 | 
| 693 void URLRequestJob::FollowRedirect(const GURL& location, int http_status_code) { | 695 void URLRequestJob::FollowRedirect(const GURL& location, int http_status_code) { | 
| 694   int rv = request_->Redirect(location, http_status_code); | 696   int rv = request_->Redirect(location, http_status_code); | 
| 695   if (rv != OK) | 697   if (rv != OK) | 
| 696     NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, rv)); | 698     NotifyDone(URLRequestStatus(URLRequestStatus::FAILED, rv)); | 
| 697 } | 699 } | 
| 698 | 700 | 
| 699 void URLRequestJob::OnRawReadComplete(int bytes_read) { | 701 void URLRequestJob::OnRawReadComplete(int bytes_read) { | 
| 700   DCHECK(raw_read_buffer_); | 702   DCHECK(raw_read_buffer_.get()); | 
| 701   if (bytes_read > 0) { | 703   if (bytes_read > 0) { | 
| 702     RecordBytesRead(bytes_read); | 704     RecordBytesRead(bytes_read); | 
| 703   } | 705   } | 
| 704   raw_read_buffer_ = NULL; | 706   raw_read_buffer_ = NULL; | 
| 705 } | 707 } | 
| 706 | 708 | 
| 707 void URLRequestJob::RecordBytesRead(int bytes_read) { | 709 void URLRequestJob::RecordBytesRead(int bytes_read) { | 
| 708   filter_input_byte_count_ += bytes_read; | 710   filter_input_byte_count_ += bytes_read; | 
| 709   prefilter_bytes_read_ += bytes_read; | 711   prefilter_bytes_read_ += bytes_read; | 
| 710   if (!filter_.get()) | 712   if (!filter_.get()) | 
| 711     postfilter_bytes_read_ += bytes_read; | 713     postfilter_bytes_read_ += bytes_read; | 
| 712   DVLOG(2) << __FUNCTION__ << "() " | 714   DVLOG(2) << __FUNCTION__ << "() " | 
| 713            << "\"" << (request_ ? request_->url().spec() : "???") << "\"" | 715            << "\"" << (request_ ? request_->url().spec() : "???") << "\"" | 
| 714            << " pre bytes read = " << bytes_read | 716            << " pre bytes read = " << bytes_read | 
| 715            << " pre total = " << prefilter_bytes_read_ | 717            << " pre total = " << prefilter_bytes_read_ | 
| 716            << " post total = " << postfilter_bytes_read_; | 718            << " post total = " << postfilter_bytes_read_; | 
| 717   UpdatePacketReadTimes();  // Facilitate stats recording if it is active. | 719   UpdatePacketReadTimes();  // Facilitate stats recording if it is active. | 
| 718   if (network_delegate_) | 720   if (network_delegate_) | 
| 719     network_delegate_->NotifyRawBytesRead(*request_, bytes_read); | 721     network_delegate_->NotifyRawBytesRead(*request_, bytes_read); | 
| 720 } | 722 } | 
| 721 | 723 | 
| 722 bool URLRequestJob::FilterHasData() { | 724 bool URLRequestJob::FilterHasData() { | 
| 723     return filter_.get() && filter_->stream_data_len(); | 725     return filter_.get() && filter_->stream_data_len(); | 
| 724 } | 726 } | 
| 725 | 727 | 
| 726 void URLRequestJob::UpdatePacketReadTimes() { | 728 void URLRequestJob::UpdatePacketReadTimes() { | 
| 727 } | 729 } | 
| 728 | 730 | 
| 729 }  // namespace net | 731 }  // namespace net | 
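
The substance of this change is one mechanical pattern: call sites that relied on `scoped_refptr<T>`'s implicit conversion to `T*` (equality tests against NULL, boolean checks, and passing the pointer to functions that take a raw `T*`) now go through an explicit `.get()`, as in `filtered_read_buffer_.get() == NULL` and `request_->NotifyAuthRequired(auth_info.get())`. The sketch below is only an illustration of why those call sites must change once the implicit conversion is gone; `RefPtrSketch`, `AuthChallengeInfoStub`, and `NotifyAuthRequiredStub` are hypothetical stand-ins, not Chromium APIs, and reference counting is omitted entirely.

```cpp
#include <cassert>
#include <cstddef>

// Hypothetical stand-in for scoped_refptr with the implicit "operator T*()"
// removed. Reference counting is deliberately omitted; only the interface
// that forces callers to write .get() is modeled.
template <typename T>
class RefPtrSketch {
 public:
  RefPtrSketch() : ptr_(NULL) {}
  RefPtrSketch(T* p) : ptr_(p) {}   // implicit from T* kept for brevity
  T* get() const { return ptr_; }   // explicit access to the raw pointer
  T* operator->() const { return ptr_; }
  // No operator T*() and no operator bool(): comparisons against NULL and
  // raw-pointer arguments must spell out get(), as the NEW column does.

 private:
  T* ptr_;  // a real scoped_refptr would AddRef()/Release() this
};

struct AuthChallengeInfoStub {};  // placeholder type, not net::AuthChallengeInfo

void NotifyAuthRequiredStub(AuthChallengeInfoStub* /*auth_info*/) {}

int main() {
  RefPtrSketch<AuthChallengeInfoStub> auth_info;
  assert(auth_info.get() == NULL);        // was "auth_info == NULL" pre-cleanup
  auth_info = new AuthChallengeInfoStub;  // leaked on purpose; no ref counting
  if (auth_info.get())                    // was "if (auth_info)"
    NotifyAuthRequiredStub(auth_info.get());  // was passing auth_info directly
  return 0;
}
```

With the conversion operator gone, the spellings in the OLD column simply stop compiling, which is what drives every hunk in this file.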