1 // Copyright 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "components/autofill/content/browser/autocheckout/whitelist_manager.h" | |
6 | |
7 #include "base/command_line.h" | |
8 #include "base/logging.h" | |
9 #include "base/memory/scoped_ptr.h" | |
10 #include "base/metrics/field_trial.h" | |
11 #include "base/strings/string_split.h" | |
12 #include "base/strings/string_util.h" | |
13 #include "components/autofill/core/common/autofill_switches.h" | |
14 #include "content/public/browser/browser_context.h" | |
15 #include "net/base/load_flags.h" | |
16 #include "net/http/http_status_code.h" | |
17 #include "net/url_request/url_fetcher.h" | |
18 #include "net/url_request/url_request_context_getter.h" | |
19 #include "url/gurl.h" | |
20 | |
21 namespace { | |
22 | |
23 // Back off in seconds after each whitelist download is attempted. | |
24 const int kDownloadIntervalSeconds = 86400; // 1 day | |
25 | |
26 // The delay in seconds after startup before download whitelist. This helps | |
27 // to reduce contention at startup time. | |
28 const int kInitialDownloadDelaySeconds = 3; | |
29 | |
30 const net::BackoffEntry::Policy kBackoffPolicy = { | |
31 // Number of initial errors to ignore before starting to back off. | |
32 0, | |
33 | |
34 // Initial delay in ms: 3 seconds. | |
35 3000, | |
36 | |
37 // Factor by which the waiting time is multiplied. | |
38 6, | |
39 | |
40 // Fuzzing percentage: no fuzzing logic. | |
41 0, | |
42 | |
43 // Maximum delay in ms: 1 hour. | |
44 1000 * 60 * 60, | |
45 | |
46 // When to discard an entry: 3 hours. | |
47 1000 * 60 * 60 * 3, | |
48 | |
49 // |always_use_initial_delay|; false means that the initial delay is | |
50 // applied after the first error, and starts backing off from there. | |
51 false, | |
52 }; | |
53 | |
54 const char kDefaultWhitelistUrl[] = | |
55 "https://www.gstatic.com/commerce/autocheckout/whitelist.csv"; | |
56 | |
57 const char kWhiteListKeyName[] = "autocheckout_whitelist_manager"; | |
58 | |
59 std::string GetWhitelistUrl() { | |
60 const CommandLine& command_line = *CommandLine::ForCurrentProcess(); | |
61 std::string whitelist_url = command_line.GetSwitchValueASCII( | |
62 autofill::switches::kAutocheckoutWhitelistUrl); | |
63 | |
64 return whitelist_url.empty() ? kDefaultWhitelistUrl : whitelist_url; | |
65 } | |
66 | |
67 } // namespace | |
68 | |
69 | |
70 namespace autofill { | |
71 namespace autocheckout { | |
72 | |
73 WhitelistManager::WhitelistManager() | |
74 : callback_is_pending_(false), | |
75 experimental_form_filling_enabled_( | |
76 CommandLine::ForCurrentProcess()->HasSwitch( | |
77 switches::kEnableExperimentalFormFilling) || | |
78 base::FieldTrialList::FindFullName("Autocheckout") == "Yes"), | |
79 bypass_autocheckout_whitelist_( | |
80 CommandLine::ForCurrentProcess()->HasSwitch( | |
81 switches::kBypassAutocheckoutWhitelist)), | |
82 retry_entry_(&kBackoffPolicy) { | |
83 } | |
84 | |
85 WhitelistManager::~WhitelistManager() {} | |
86 | |
87 void WhitelistManager::Init(net::URLRequestContextGetter* context_getter) { | |
88 DCHECK(context_getter); | |
89 context_getter_ = context_getter; | |
90 ScheduleDownload(base::TimeDelta::FromSeconds(kInitialDownloadDelaySeconds)); | |
91 } | |
92 | |
93 void WhitelistManager::ScheduleDownload(base::TimeDelta interval) { | |
94 if (!experimental_form_filling_enabled_) { | |
95 // The feature is not enabled: do not do the request. | |
96 return; | |
97 } | |
98 if (download_timer_.IsRunning() || callback_is_pending_) { | |
99 // A download activity is already scheduled or happening. | |
100 return; | |
101 } | |
102 StartDownloadTimer(interval); | |
103 } | |
104 | |
105 void WhitelistManager::StartDownloadTimer(base::TimeDelta interval) { | |
106 download_timer_.Start(FROM_HERE, | |
107 interval, | |
108 this, | |
109 &WhitelistManager::TriggerDownload); | |
110 } | |
111 | |
112 const AutofillMetrics& WhitelistManager::GetMetricLogger() const { | |
113 return metrics_logger_; | |
114 } | |
115 | |
116 void WhitelistManager::TriggerDownload() { | |
117 callback_is_pending_ = true; | |
118 | |
119 request_started_timestamp_ = base::Time::Now(); | |
120 | |
121 request_.reset(net::URLFetcher::Create( | |
122 0, GURL(GetWhitelistUrl()), net::URLFetcher::GET, this)); | |
123 request_->SetRequestContext(context_getter_); | |
124 request_->SetAutomaticallyRetryOn5xx(false); | |
125 request_->SetLoadFlags(net::LOAD_DO_NOT_SAVE_COOKIES | | |
126 net::LOAD_DO_NOT_SEND_COOKIES); | |
127 request_->Start(); | |
128 } | |
129 | |
130 void WhitelistManager::StopDownloadTimer() { | |
131 download_timer_.Stop(); | |
132 callback_is_pending_ = false; | |
133 } | |
134 | |
135 void WhitelistManager::OnURLFetchComplete( | |
136 const net::URLFetcher* source) { | |
137 DCHECK(callback_is_pending_); | |
138 callback_is_pending_ = false; | |
139 scoped_ptr<net::URLFetcher> old_request = request_.Pass(); | |
140 DCHECK_EQ(source, old_request.get()); | |
141 | |
142 AutofillMetrics::AutocheckoutWhitelistDownloadStatus status; | |
143 base::TimeDelta duration = base::Time::Now() - request_started_timestamp_; | |
144 | |
145 // Refresh the whitelist after kDownloadIntervalSeconds (24 hours). | |
146 base::TimeDelta next_download_time = | |
147 base::TimeDelta::FromSeconds(kDownloadIntervalSeconds); | |
148 | |
149 if (source->GetResponseCode() == net::HTTP_OK) { | |
150 std::string data; | |
151 source->GetResponseAsString(&data); | |
152 BuildWhitelist(data); | |
153 status = AutofillMetrics::AUTOCHECKOUT_WHITELIST_DOWNLOAD_SUCCEEDED; | |
154 retry_entry_.Reset(); | |
155 } else { | |
156 status = AutofillMetrics::AUTOCHECKOUT_WHITELIST_DOWNLOAD_FAILED; | |
157 retry_entry_.InformOfRequest(false); | |
158 if (!retry_entry_.CanDiscard()) | |
159 next_download_time = retry_entry_.GetTimeUntilRelease(); | |
160 } | |
161 | |
162 GetMetricLogger().LogAutocheckoutWhitelistDownloadDuration(duration, status); | |
163 ScheduleDownload(next_download_time); | |
164 } | |
165 | |
166 std::string WhitelistManager::GetMatchedURLPrefix(const GURL& url) const { | |
167 if (!experimental_form_filling_enabled_ || url.is_empty()) | |
168 return std::string(); | |
169 | |
170 for (std::vector<std::string>::const_iterator it = url_prefixes_.begin(); | |
171 it != url_prefixes_.end(); ++it) { | |
172 // This is only for ~20 sites initially, liner search is sufficient. | |
173 // TODO(benquan): Look for optimization options when we support | |
174 // more sites. | |
175 if (StartsWithASCII(url.spec(), *it, true)) { | |
176 DVLOG(1) << "WhitelistManager matched URLPrefix: " << *it; | |
177 return *it; | |
178 } | |
179 } | |
180 return bypass_autocheckout_whitelist_ ? url.spec() : std::string(); | |
181 } | |
182 | |
183 void WhitelistManager::BuildWhitelist(const std::string& data) { | |
184 std::vector<std::string> new_url_prefixes; | |
185 | |
186 std::vector<std::string> lines; | |
187 base::SplitString(data, '\n', &lines); | |
188 | |
189 for (std::vector<std::string>::const_iterator line = lines.begin(); | |
190 line != lines.end(); ++line) { | |
191 if (!line->empty()) { | |
192 std::vector<std::string> fields; | |
193 base::SplitString(*line, ',', &fields); | |
194 // Currently we have only one column in the whitelist file, if we decide | |
195 // to add more metadata as additional columns, previous versions of | |
196 // Chrome can ignore them and continue to work. | |
197 if (!fields[0].empty()) | |
198 new_url_prefixes.push_back(fields[0]); | |
199 } | |
200 } | |
201 url_prefixes_ = new_url_prefixes; | |
202 } | |
203 | |
204 } // namespace autocheckout | |
205 } // namespace autofill | |