Chromium Code Reviews

Diff: tests/isolateserver_archive_test.py

Issue 14455006: Do not retry uploading to blobstore on HTTP 500, regenerate a new url first. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/swarm_client
Patch Set: address comment (created 7 years, 8 months ago)
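The behaviour under review: the blobstore POST is not retried in place on an HTTP 500; a new upload URL is generated first, because the previous URL may already have been consumed by the failed attempt. Below is a minimal sketch of that control flow, not the actual implementation in isolateserver_archive.py; the HTTP helper, the multipart encoder and MAX_UPLOAD_ATTEMPTS are supplied as stand-ins whose real names and signatures may differ.

# A minimal sketch of the intended retry flow (illustration only).
MAX_UPLOAD_ATTEMPTS = 5  # Hypothetical bound, for illustration.


def upload_hash_content_to_blobstore(
    generate_url, data, hash_key, content, url_open, encode_multipart_formdata):
  """Uploads |content| to the blobstore, regenerating the upload URL on 50x.

  A blobstore upload URL may be consumed by a failed POST, so the POST is not
  retried in place on HTTP 500; a fresh URL is generated and the upload is
  attempted again from scratch.
  """
  content_type, body = encode_multipart_formdata(
      data, [('content', hash_key, content)])
  for _ in range(MAX_UPLOAD_ATTEMPTS):
    # Ask the server for a one-shot blobstore upload URL.
    response = url_open(generate_url, data=data)
    if not response:
      continue
    upload_url = response.read()
    # retry_50x=False: never retry this POST on a 50x reply, since the URL may
    # already have been consumed; loop around and regenerate instead.
    result = url_open(
        upload_url, data=body, content_type=content_type, retry_50x=False)
    if result:
      return result.read()
  return None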
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import binascii
import random
import hashlib
import logging
import os
(...skipping 124 matching lines...)
        {'data': sha1encoded, 'content_type': 'application/octet-stream'},
        StringIO.StringIO('\0'),
      ),
      (
        path + 'content/generate_blobstore_url/default-gzip/%s' % s,
        {'data': [('token', 'foo bar')]},
        StringIO.StringIO('an_url/'),
      ),
      (
        'an_url/',
-       {'data': body, 'content_type': content_type},
+       {'data': body, 'content_type': content_type, 'retry_50x': False},
        StringIO.StringIO('ok'),
      ),
    ]

    old_read_and_compress = isolateserver_archive.read_and_compress
    try:
      isolateserver_archive.read_and_compress = lambda x, y: compressed
      result = isolateserver_archive.upload_sha1_tree(
          base_url=path,
          indir=os.getcwd(),
(...skipping 25 matching lines...)
        self.assertEqual('FakeUrl', url)
        self.assertEqual(self.fail, upload_func)
        actual.extend(items)

      isolateserver_archive.update_files_to_upload = process
      isolateserver_archive.process_items('FakeUrl', items, self.fail)
      self.assertEqual(expected, actual)
    finally:
      isolateserver_archive.update_files_to_upload = old

+  def test_upload_blobstore_simple(self):
+    content = 'blob_content'
+    s = hashlib.sha1(content).hexdigest()
+    path = 'http://example.com:80/'
+    data = [('token', 'foo bar')]
+    content_type, body = isolateserver_archive.encode_multipart_formdata(
+        data[:], [('content', s, 'blob_content')])
+    self._requests = [
+      (
+        path + 'gen_url?foo#bar',
+        {'data': data[:]},
+        StringIO.StringIO('an_url/'),
+      ),
+      (
+        'an_url/',
+        {'data': body, 'content_type': content_type, 'retry_50x': False},
+        StringIO.StringIO('ok42'),
+      ),
+    ]
+    result = isolateserver_archive.upload_hash_content_to_blobstore(
+        path + 'gen_url?foo#bar', data[:], s, content)
+    self.assertEqual('ok42', result)
+
+  def test_upload_blobstore_retry_500(self):
+    content = 'blob_content'
+    s = hashlib.sha1(content).hexdigest()
+    path = 'http://example.com:80/'
+    data = [('token', 'foo bar')]
+    content_type, body = isolateserver_archive.encode_multipart_formdata(
+        data[:], [('content', s, 'blob_content')])
+    self._requests = [
+      (
+        path + 'gen_url?foo#bar',
+        {'data': data[:]},
+        StringIO.StringIO('an_url/'),
+      ),
+      (
+        'an_url/',
+        {'data': body, 'content_type': content_type, 'retry_50x': False},
+        # Let's say an HTTP 500 was returned.
+        None,
+      ),
+      # In that case, a new url must be generated since the last one may have
+      # been "consumed".
+      (
+        path + 'gen_url?foo#bar',
+        {'data': data[:]},
+        StringIO.StringIO('an_url/'),
+      ),
+      (
+        'an_url/',
+        {'data': body, 'content_type': content_type, 'retry_50x': False},
+        StringIO.StringIO('ok42'),
+      ),
+    ]
+    result = isolateserver_archive.upload_hash_content_to_blobstore(
+        path + 'gen_url?foo#bar', data[:], s, content)
+    self.assertEqual('ok42', result)

if __name__ == '__main__':
  logging.basicConfig(
      level=(logging.DEBUG if '-v' in sys.argv else logging.ERROR))
  unittest.main()
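The new tests drive a self._requests fixture that is defined in a part of the file elided from this diff. Judging from how the tests populate it, it is an ordered list of (expected url, expected kwargs, canned response) entries consumed one per request, with None simulating a failed request such as an HTTP 500. A plausible sketch of such a stub follows, assuming a hypothetical test-case method _url_open; the real fixture may be wired up differently.

# Illustrative only: one plausible shape of the request stub behind
# self._requests; the class and method names here are hypothetical.
import unittest


class BlobstoreUploadTest(unittest.TestCase):

  def setUp(self):
    super(BlobstoreUploadTest, self).setUp()
    # Ordered expectations: (expected_url, expected_kwargs, canned_response).
    self._requests = []

  def _url_open(self, url, **kwargs):
    """Pops the next expected request and returns its canned response.

    A canned response of None simulates a failed request (e.g. HTTP 500),
    which is what test_upload_blobstore_retry_500 relies on to force the
    upload URL to be regenerated.
    """
    self.assertTrue(self._requests, 'Unexpected request to %s' % url)
    expected_url, expected_kwargs, response = self._requests.pop(0)
    self.assertEqual(expected_url, url)
    self.assertEqual(expected_kwargs, kwargs)
    return response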