| OLD | NEW |
| (Empty) | |
| 1 # Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. |
| 4 |
| 5 import json |
| 6 import logging |
| 7 import tarfile |
| 8 from datetime import datetime, timedelta |
| 9 from StringIO import StringIO |
| 10 |
| 11 from file_system import FileNotFoundError, ToUnicode |
| 12 from future import Future |
| 13 from patcher import Patcher |
| 14 import svn_constants |
| 15 |
# Use a special value other than None to represent a deleted file in the patch.
# (None is the "not cached" signal, so deleted files need a distinct sentinel.)
_FILE_NOT_FOUND_VALUE = (None,)

# How long a cached (issue -> latest patchset) lookup remains valid before
# Rietveld is queried again.
_ISSUE_CACHE_MAXAGE = timedelta(minutes=5)

# Patches are only honored when the issue's base_url is one of these known
# mirrors of the Chromium src tree.
_CHROMIUM_REPO_BASEURLS = [
  'https://src.chromium.org/svn/trunk/src/',
  'http://src.chromium.org/svn/trunk/src/',
  'svn://svn.chromium.org/chrome/trunk/src',
  'https://chromium.googlesource.com/chromium/src.git@master',
  'http://git.chromium.org/chromium/src.git@master',
]

# Only files under these docs paths are treated as part of the patch.
_DOCS_PATHS = [
  svn_constants.API_PATH,
  svn_constants.TEMPLATE_PATH,
  svn_constants.STATIC_PATH
]
| 34 |
| 35 ''' Append @patchset for keys to distinguish between different patchsets of an |
| 36 issue. |
| 37 ''' |
| 38 def _MakeKey(path_or_paths, patchset): |
| 39 if isinstance(path_or_paths, list) or isinstance(path_or_paths, set): |
| 40 return ['%s@%s' % (p, patchset) for p in path_or_paths] |
| 41 return _MakeKey([path_or_paths], patchset)[0] |
| 42 |
def _ToObjectStoreValue(value, patchset):
  '''Re-keys the path -> data mapping |value| so every key carries the
  @patchset suffix (see _MakeKey) before it is written to the object store.
  '''
  return dict((_MakeKey(path, patchset), data)
              for path, data in value.items())
| 45 |
| 46 def _FromObjectStoreValue(value): |
| 47 return {key[0:key.find('@')]: value[key] for key in value} |
| 48 |
class _AsyncFetchFuture(object):
  '''Resolves the contents of a set of paths within a Rietveld patch.

  Paths already present in |cached_files| are served from there; any
  |missing_paths| are extracted from the issue's tarball, whose download is
  kicked off asynchronously in the constructor and joined in Get().
  '''
  def __init__(self,
               base_path,
               paths,
               cached_files,
               missing_paths,
               binary,
               issue,
               patchset,
               patched_files,
               fetcher,
               object_store):
    self._base_path = base_path
    self._paths = paths
    self._cached_value = cached_files
    self._missing_paths = missing_paths
    self._binary = binary
    self._issue = issue
    self._patchset = patchset
    # Flatten the (added, deleted, modified) triple into one list of paths.
    self._files = []
    for files in patched_files:
      self._files += files
    self._object_store = object_store
    if missing_paths is not None:
      # Start the tarball download now; _GetMissingPaths blocks on it later.
      logging.info('Fetching tarball/%s/%s' % (issue, patchset))
      self._tarball = fetcher.FetchAsync('tarball/%s/%s' % (issue, patchset))

  def _ExtractFile(self, tar, tar_path):
    '''Returns the contents of |tar_path| within |tar|.

    Raises FileNotFoundError (not a 500) when the member is missing, is not a
    regular file, or cannot be read. extractfile() raises KeyError for a
    missing member and returns None for non-regular members; previously those
    cases crashed (and the finally clause then hit an unbound local).
    '''
    try:
      patched_file = tar.extractfile(tar_path)
    except (tarfile.TarError, KeyError):
      patched_file = None
    if patched_file is None:
      raise FileNotFoundError(
          'Error extracting tarball for issue %s patchset %s.' %
          (self._issue, self._patchset))
    try:
      return patched_file.read()
    # In the unlikely case that the tarball is corrupted, throw
    # FileNotFoundError instead of 500 Internal Server Error.
    except tarfile.TarError:
      raise FileNotFoundError(
          'Error extracting tarball for issue %s patchset %s.' %
          (self._issue, self._patchset))
    finally:
      patched_file.close()

  def _GetMissingPaths(self):
    '''Extracts every not-yet-cached patched file from the tarball, caches all
    of them in the object store, and merges |self._missing_paths| into
    |self._cached_value|.

    Raises FileNotFoundError if the tarball cannot be fetched or parsed, or if
    a missing path is not part of the patch.
    '''
    tarball_result = self._tarball.Get()
    if tarball_result.status_code != 200:
      raise FileNotFoundError(
          'Failed to download tarball for issue %s patchset %s. Status: %s' %
          (self._issue, self._patchset, tarball_result.status_code))

    try:
      tar = tarfile.open(fileobj=StringIO(tarball_result.content))
    except tarfile.TarError:
      raise FileNotFoundError('Invalid tarball for issue %s patchset %s.' %
                              (self._issue, self._patchset))

    self._uncached_value = {}
    for path in self._files:
      if path in self._cached_value:
        continue

      # Patched ("after") files live under the b/ prefix in the tarball.
      if self._base_path:
        tar_path = 'b/%s/%s' % (self._base_path, path)
      else:
        tar_path = 'b/%s' % path
      data = self._ExtractFile(tar, tar_path)

      # Deleted files still exist in the tarball, but they are empty.
      if len(data) == 0:
        # Mark it empty instead of throwing FileNotFoundError here to make sure
        # self._object_store.SetMulti below is called and all files read are
        # cached.
        self._uncached_value[path] = _FILE_NOT_FOUND_VALUE
      elif self._binary:
        self._uncached_value[path] = data
      else:
        self._uncached_value[path] = ToUnicode(data)

    self._object_store.SetMulti(_ToObjectStoreValue(self._uncached_value,
                                                    self._patchset))

    for path in self._missing_paths:
      if self._uncached_value.get(path) is None:
        raise FileNotFoundError('File %s was not found in the patch.' % path)
      self._cached_value[path] = self._uncached_value[path]

  def Get(self):
    '''Returns a dict mapping each requested path to its patched contents.

    Raises FileNotFoundError for any requested path deleted by the patch.
    '''
    if self._missing_paths is not None:
      self._GetMissingPaths()

    # Make sure all paths exist before returning.
    for path in self._paths:
      if self._cached_value[path] == _FILE_NOT_FOUND_VALUE:
        raise FileNotFoundError('File %s was deleted in the patch.' % path)
    return self._cached_value
| 138 |
class RietveldPatcher(Patcher):
  '''Fetches patched resources for a Rietveld code-review issue.
  '''
  def __init__(self,
               base_path,
               issue,
               fetcher,
               object_store_creator_factory):
    self._base_path = base_path
    self._issue = issue
    self._fetcher = fetcher
    self._object_store = object_store_creator_factory.Create(
        RietveldPatcher).Create()

  def GetVersion(self):
    # An issue's "version" is its latest patchset number.
    return self._GetPatchset()

  def _GetPatchset(self):
    '''Returns the latest patchset number of the issue as a string, or None
    if the issue is closed, malformed, not against a known Chromium repo, or
    cannot be fetched. Results are cached for _ISSUE_CACHE_MAXAGE.
    '''
    cached = self._object_store.Get(self._issue).Get()
    if cached is not None:
      patchset, fetched_at = cached
      if datetime.now() - fetched_at < _ISSUE_CACHE_MAXAGE:
        return patchset

    try:
      issue_json = json.loads(self._fetcher.Fetch(
          'api/%s' % self._issue).content)
    except Exception:
      # Best-effort: an unreachable or malformed API response means no patch.
      return None

    if issue_json.get('closed'):
      return None

    patchsets = issue_json.get('patchsets')
    if not isinstance(patchsets, list) or len(patchsets) == 0:
      return None

    if issue_json.get('base_url') not in _CHROMIUM_REPO_BASEURLS:
      return None

    latest = str(patchsets[-1])
    self._object_store.Set(self._issue, (latest, datetime.now()))
    return latest

  def GetPatchedFiles(self):
    '''Returns an (added, deleted, modified) triple of docs-relative paths
    touched by the latest patchset, considering only files under _DOCS_PATHS.
    Returns ([], [], []) when the patchset cannot be fetched or parsed.
    '''
    patchset = self._GetPatchset()
    cache_key = '@%s' % patchset
    cached = self._object_store.Get(cache_key).Get()
    if cached is not None:
      return cached

    try:
      patchset_json = json.loads(self._fetcher.Fetch(
          'api/%s/%s' % (self._issue, patchset)).content)
    except Exception:
      return ([], [], [])

    files = patchset_json.get('files')
    if files is None or not isinstance(files, dict):
      return ([], [], [])

    added, deleted, modified = [], [], []
    for key in files:
      # Strip the leading '<base_path>/' to get a docs-relative path.
      relative = key.split(self._base_path + '/', 1)[1]
      if not any(relative.startswith(docs) for docs in _DOCS_PATHS):
        continue
      status = (files[key].get('status') or 'M').strip()
      if status == 'A':
        added.append(relative)
      elif status == 'D':
        deleted.append(relative)
      else:
        modified.append(relative)

    result = (added, deleted, modified)
    self._object_store.Set(cache_key, result)
    return result

  def Apply(self, paths, file_system, binary=False):
    '''Returns a Future whose result maps each path in |paths| to its patched
    contents. |file_system| is not consulted by this implementation.
    '''
    patchset = self._GetPatchset()
    cached = _FromObjectStoreValue(
        self._object_store.GetMulti(_MakeKey(paths, patchset)).Get())
    uncached = list(set(paths) - set(cached))
    if not uncached:
      # None tells _AsyncFetchFuture that no tarball fetch is needed.
      uncached = None
    return Future(delegate=_AsyncFetchFuture(
        self._base_path,
        paths,
        cached,
        uncached,
        binary,
        self._issue,
        patchset,
        self.GetPatchedFiles(),
        self._fetcher,
        self._object_store))
| OLD | NEW |