Chromium Code Reviews
|
| OLD | NEW |
|---|---|
| (Empty) | |
| 1 # Copyright 2013 The Chromium Authors. All rights reserved. | |
| 2 # Use of this source code is governed by a BSD-style license that can be | |
| 3 # found in the LICENSE file. | |
| 4 | |
| 5 import json | |
| 6 import logging | |
| 7 import tarfile | |
| 8 from datetime import datetime, timedelta | |
| 9 from StringIO import StringIO | |
| 10 | |
| 11 from file_system import FileNotFoundError, ToUnicode | |
| 12 from future import Future | |
| 13 from patched_file_system import Patcher | |
| 14 import svn_constants | |
| 15 | |
| 16 # Use a special value other than None to represent a deleted file in the patch. | |
| 17 _FILE_NOT_FOUND_VALUE = (None,) | |
|
not at google - send to devlin
2013/05/03 19:12:22
This is a bit messy, can you store the deleted files in a separate structure instead? [comment truncated in extraction; reconstructed]
| |
| 18 | |
| 19 _ISSUE_CACHE_MAXAGE = timedelta(minutes=5) | |
| 20 | |
| 21 _CHROMIUM_REPO_BASEURLS = [ | |
| 22 'https://src.chromium.org/svn/trunk/src/', | |
| 23 'http://src.chromium.org/svn/trunk/src/', | |
| 24 'svn://svn.chromium.org/chrome/trunk/src', | |
| 25 'https://chromium.googlesource.com/chromium/src.git@master', | |
| 26 'http://git.chromium.org/chromium/src.git@master', | |
| 27 ] | |
| 28 | |
| 29 _DOCS_PATHS = [ | |
| 30 svn_constants.API_PATH, | |
| 31 svn_constants.TEMPLATE_PATH, | |
| 32 svn_constants.STATIC_PATH | |
| 33 ] | |
| 34 | |
| 35 ''' Append @patchset for keys to distinguish between different patchsets of an | |
| 36 issue. | |
| 37 ''' | |
| 38 def _MakeKey(path_or_paths, patchset): | |
| 39 if isinstance(path_or_paths, list): | |
| 40 result = [] | |
| 41 for p in path_or_paths: | |
| 42 result.append('%s@%s' % (p, patchset)) | |
| 43 return result | |
|
not at google - send to devlin
2013/05/03 19:12:22
use a list comprehension
方觉(Fang Jue)
2013/05/04 02:05:41
Done.
| |
| 44 else: | |
|
not at google - send to devlin
2013/05/03 19:12:22
previous statement is a return, don't need an else
方觉(Fang Jue)
2013/05/04 02:05:41
Done.
| |
| 45 return _MakeKey([path_or_paths], patchset)[0] | |
| 46 | |
def _ToObjectStoreValue(value, patchset):
  '''Returns a copy of the dict |value| with every key tagged with |patchset|
  (via _MakeKey) so that entries from different patchsets do not collide in
  the object store.
  '''
  # Per review: dict comprehension instead of a manual loop.
  return dict((_MakeKey(path, patchset), data)
              for path, data in value.items())
| 52 | |
| 53 def _FromObjectStoreValue(value): | |
| 54 result = {} | |
|
not at google - send to devlin
2013/05/03 19:12:22
and here
方觉(Fang Jue)
2013/05/04 02:05:41
Done.
| |
| 55 for key in value: | |
| 56 result[key[0:key.find('@')]] = value[key] | |
| 57 return result | |
| 58 | |
| 59 class _AsyncFetchFuture(object): | |
| 60 def __init__(self, | |
| 61 base_path, | |
| 62 paths, | |
| 63 cached_files, | |
| 64 missing_paths, | |
| 65 binary, | |
| 66 issue, | |
| 67 patchset, | |
| 68 patched_files, | |
| 69 fetcher, | |
| 70 object_store): | |
| 71 self._base_path = base_path | |
| 72 self._paths = paths | |
| 73 self._cached_value = cached_files | |
| 74 self._missing_paths = missing_paths | |
| 75 self._binary = binary | |
| 76 self._issue = issue | |
| 77 self._patchset = patchset | |
| 78 self._files = [] | |
| 79 for files in patched_files: | |
| 80 self._files += files | |
| 81 self._object_store = object_store | |
| 82 if missing_paths is not None: | |
| 83 logging.info('Fetching tarball/%s/%s' % (issue, patchset)) | |
| 84 self._tarball = fetcher.FetchAsync('tarball/%s/%s' % (issue, patchset)) | |
| 85 | |
| 86 def _GetMissingPaths(self): | |
| 87 tarball_result = self._tarball.Get() | |
| 88 if tarball_result.status_code != 200: | |
| 89 raise FileNotFoundError( | |
| 90 'Failed to download tarball for issue %s patchset %s. Status: %s' % | |
| 91 (self._issue, self._patchset, tarball_result.status_code)) | |
| 92 | |
|
not at google - send to devlin
2013/05/03 19:12:22
It would actually be nice to show a more informati
| |
| 93 try: | |
| 94 tar = tarfile.open(fileobj=StringIO(tarball_result.content)) | |
| 95 except tarfile.TarError as e: | |
| 96 raise FileNotFoundError('Invalid tarball for issue %s patchset %s.' % | |
| 97 (self._issue, self._patchset)) | |
| 98 | |
| 99 self._uncached_value = {} | |
| 100 for path in self._files: | |
| 101 if self._cached_value.get(path) is not None: | |
|
not at google - send to devlin
2013/05/03 19:12:22
"path in self._cached_value"?
方觉(Fang Jue)
2013/05/04 02:05:41
Done.
| |
| 102 continue | |
| 103 | |
| 104 if self._base_path: | |
| 105 tar_path = 'b/%s/%s' % (self._base_path, path) | |
| 106 else: | |
| 107 tar_path = 'b/%s' % path | |
| 108 try: | |
| 109 patched_file = tar.extractfile(tar_path) | |
| 110 data = patched_file.read() | |
| 111 # In the unlikely case that the tarball is corrupted, throw | |
| 112 # FileNotFoundError instead of 500 Internal Server Error. | |
| 113 except tarfile.TarError as e: | |
| 114 raise FileNotFoundError( | |
| 115 'Error extracting tarball for issue %s patchset %s.' % | |
| 116 (self._issue, self._patchset)) | |
| 117 finally: | |
| 118 patched_file.close() | |
| 119 | |
| 120 # Deleted files still exist in the tarball, but they are empty. | |
| 121 if len(data) == 0: | |
| 122 # Mark it empty instead of throwing FileNotFoundError here to make sure | |
| 123 # self._object_store.SetMulti below is called and all files read are | |
| 124 # cached. | |
| 125 self._uncached_value[path] = _FILE_NOT_FOUND_VALUE | |
| 126 elif self._binary: | |
| 127 self._uncached_value[path] = data | |
| 128 else: | |
| 129 self._uncached_value[path] = ToUnicode(data) | |
| 130 | |
| 131 self._object_store.SetMulti(_ToObjectStoreValue(self._uncached_value, | |
| 132 self._patchset)) | |
| 133 | |
| 134 for path in self._missing_paths: | |
| 135 if self._uncached_value.get(path) is None: | |
| 136 raise FileNotFoundError('File %s was not found in the patch.' % path) | |
| 137 self._cached_value[path] = self._uncached_value[path] | |
| 138 | |
| 139 def Get(self): | |
| 140 if self._missing_paths is not None: | |
| 141 self._GetMissingPaths() | |
| 142 | |
| 143 # Make sure all paths exist before returning. | |
| 144 for path in self._paths: | |
| 145 if self._cached_value[path] == _FILE_NOT_FOUND_VALUE: | |
| 146 raise FileNotFoundError('File %s was deleted in the patch.' % path) | |
| 147 return self._cached_value | |
| 148 | |
class RietveldPatcher(Patcher):
  ''' Class to fetch resources from a patchset in Rietveld.
  '''
  def __init__(self,
               base_path,
               issue,
               fetcher,
               object_store_creator_factory):
    self._base_path = base_path
    self._issue = issue
    self._fetcher = fetcher
    self._object_store = object_store_creator_factory.Create(
        RietveldPatcher).Create()

  def GetVersion(self):
    '''Returns the latest patchset of the issue as the version identifier.

    NOTE(review): reported as no longer used; delete once confirmed.
    '''
    return self._GetPatchset()

  def _GetPatchset(self):
    '''Returns the issue's latest patchset number as a string, or None if the
    issue is closed, malformed, or not based on a known Chromium repository.

    The issue -> patchset mapping is cached for _ISSUE_CACHE_MAXAGE so
    repeated requests do not hit the Rietveld API every time.
    '''
    value = self._object_store.Get(self._issue).Get()
    if value is not None:
      patchset, time = value
      if datetime.now() - time < _ISSUE_CACHE_MAXAGE:
        return patchset

    try:
      issue_json = json.loads(self._fetcher.Fetch(
          'api/%s' % self._issue).content)
    except Exception:
      # Best effort: any fetch or JSON parse failure means "no patchset".
      return None

    if issue_json.get('closed'):
      return None

    patchsets = issue_json.get('patchsets')
    if not isinstance(patchsets, list) or len(patchsets) == 0:
      return None

    if not issue_json.get('base_url') in _CHROMIUM_REPO_BASEURLS:
      return None

    patchset = str(patchsets[-1])
    self._object_store.Set(self._issue, (patchset, datetime.now()))
    return patchset

  def GetPatchedFiles(self):
    '''Returns an (added, deleted, modified) tuple of lists of paths, relative
    to |base_path| and limited to _DOCS_PATHS, touched by the latest patchset.
    Returns three empty lists on any failure.
    '''
    patchset = self._GetPatchset()
    object_store_key = '@%s' % patchset
    patched_files = self._object_store.Get(object_store_key).Get()
    if patched_files is not None:
      return patched_files

    try:
      patchset_json = json.loads(self._fetcher.Fetch(
          'api/%s/%s' % (self._issue, patchset)).content)
    except Exception:
      # Per review: inline the empty tuple rather than naming it up front.
      return ([], [], [])

    files = patchset_json.get('files')
    if files is None or not isinstance(files, dict):
      return ([], [], [])

    added = []
    deleted = []
    modified = []
    for key in files:
      parts = key.split(self._base_path + '/', 1)
      if len(parts) < 2:
        # Path lies outside |base_path|; the original indexed [1] here and
        # raised IndexError for such keys.
        continue
      f = parts[1]
      if any(f.startswith(path) for path in _DOCS_PATHS):
        status = (files[key].get('status') or 'M').strip()
        if status == 'A':
          added.append(f)
        elif status == 'D':
          deleted.append(f)
        else:
          modified.append(f)

    patched_files = (added, deleted, modified)
    self._object_store.Set(object_store_key, patched_files)
    return patched_files

  def Apply(self, paths, file_system, binary=False):
    '''Returns a Future whose Get() yields a dict of path -> patched content
    for each of |paths|. Paths deleted in the patch raise FileNotFoundError
    from the Future's Get(). |file_system| is part of the Patcher interface
    signature and is unused here.
    '''
    patchset = self._GetPatchset()
    cached_files = _FromObjectStoreValue(
        self._object_store.GetMulti(_MakeKey(paths, patchset)).Get())
    # None signals _AsyncFetchFuture that no tarball fetch is necessary.
    missing_paths = list(set(paths) - set(cached_files.keys()))
    if len(missing_paths) == 0:
      missing_paths = None
    return Future(delegate=_AsyncFetchFuture(
        self._base_path,
        paths,
        cached_files,
        missing_paths,
        binary,
        self._issue,
        patchset,
        self.GetPatchedFiles(),
        self._fetcher,
        self._object_store))
| OLD | NEW |