Chromium Code Reviews
|
| OLD | NEW |
|---|---|
| (Empty) | |
| 1 # Copyright 2013 The Chromium Authors. All rights reserved. | |
| 2 # Use of this source code is governed by a BSD-style license that can be | |
| 3 # found in the LICENSE file. | |
| 4 | |
| 5 import json | |
| 6 import logging | |
| 7 import tarfile | |
| 8 from StringIO import StringIO | |
| 9 | |
| 10 from file_system import FileNotFoundError, ToUnicode | |
| 11 from future import Future | |
| 12 from patched_file_system import Patcher | |
| 13 import svn_constants | |
| 14 | |
# Use a special value other than None to represent a deleted file in the patch.
# A one-element tuple (rather than object()) is used so the sentinel compares
# by value: object() sentinels are only equal by identity, which — per the
# review discussion — breaks once the value round-trips through the object
# store. TODO confirm the store actually serializes values.
_FILE_NOT_FOUND_VALUE = (None,)
|
not at google - send to devlin
2013/04/30 15:37:42
i usually use _FILE_NOT_FOUND_VALUE = object(), an
方觉(Fang Jue)
2013/05/01 15:27:25
Well, object() is not object() and object() != object(),
| |
| 17 | |
# Base URLs an issue must be filed against for its patch to be applicable to
# the docs; both the SVN trunk and the git mirrors of Chromium src are
# accepted (see the base_url check in RietveldPatcher._GetPatchset).
CHROMIUM_REPO_BASEURLS = [
  'https://src.chromium.org/svn/trunk/src/',
  'http://src.chromium.org/svn/trunk/src/',
  'svn://svn.chromium.org/chrome/trunk/src',
  'https://chromium.googlesource.com/chromium/src.git@master',
  'http://git.chromium.org/chromium/src.git@master',
]
# Docserver directories that a patch is allowed to touch.
DOCS_PATHS = [
  svn_constants.API_PATH,
  svn_constants.INTRO_PATH,
  svn_constants.ARTICLE_PATH,
  svn_constants.PUBLIC_TEMPLATE_PATH,
  svn_constants.PRIVATE_TEMPLATE_PATH,
  svn_constants.JSON_PATH,
  svn_constants.STATIC_PATH
]
# Rietveld JSON API endpoints: issue metadata, and the file list of a single
# patchset of an issue.
RIETVELD_ISSUE_JSON = 'api/%s'
RIETVELD_PATCHSET_JSON = 'api/%s/%s'
| 36 | |
class _AsyncFetchFuture(object):
  '''Future-like object that resolves a set of paths against a Rietveld
  patchset. Paths already present in |cached_files| are served from there;
  the rest (|missing_paths|) are extracted from the patchset tarball, whose
  download is kicked off asynchronously at construction time.
  '''
  def __init__(self,
               base_path,
               paths,
               cached_files,
               missing_paths,
               binary,
               issue,
               patchset,
               patched_files,
               fetcher,
               object_store):
    self._base_path = base_path
    self._paths = paths
    self._cached_value = cached_files
    self._missing_paths = missing_paths
    self._binary = binary
    # Flatten the (added, deleted, modified) triple into a single list of
    # every path the patch touches.
    self._files = []
    for files in patched_files:
      self._files += files
    self._object_store = object_store
    # Only download the tarball if at least one path is not already cached.
    if missing_paths is not None:
      logging.info('Fetching tarball/%s/%s', issue, patchset)
      self._tarball = fetcher.FetchAsync('tarball/%s/%s' % (issue, patchset))

  def _GetMissingPaths(self):
    '''Extracts |self._missing_paths| from the patchset tarball into
    |self._cached_value|, caching everything extracted in the object store.

    Raises:
      FileNotFoundError: if the tarball cannot be downloaded or parsed, or
          if any of the missing paths is absent from the patch.
    '''
    tarball_result = self._tarball.Get()
    if tarball_result.status_code != 200:
      raise FileNotFoundError(
          'Failed to download tarball (status %s).' %
              tarball_result.status_code)

    try:
      tar = tarfile.open(fileobj=StringIO(tarball_result.content))
    except tarfile.TarError as e:
      raise FileNotFoundError('Invalid tarball: %s' % e)

    self._uncached_value = {}
    for path in self._files:
      if self._cached_value.get(path) is not None:
        continue

      # Patched file contents live under the b/ ("after") side of the
      # tarball's diff layout.
      if self._base_path:
        tar_path = 'b/%s/%s' % (self._base_path, path)
      else:
        tar_path = 'b/%s' % path
      try:
        patched_file = tar.extractfile(tar_path)
        try:
          data = patched_file.read()
        finally:
          patched_file.close()
      # extractfile raises KeyError (via getmember) when |tar_path| is not
      # in the archive, so catch it alongside TarError.
      except (tarfile.TarError, KeyError):
        # Not in the tarball; remember that so Get() can report it.
        self._uncached_value[path] = _FILE_NOT_FOUND_VALUE
        continue

      # Deleted files still exist in the tarball, but they are empty.
      if len(data) == 0:
        self._uncached_value[path] = _FILE_NOT_FOUND_VALUE
      elif self._binary:
        self._uncached_value[path] = data
      else:
        self._uncached_value[path] = ToUnicode(data)

    self._object_store.SetMulti(self._uncached_value)

    for path in self._missing_paths:
      if self._uncached_value.get(path) is None:
        raise FileNotFoundError('File %s was not found in the patch.' % path)
      self._cached_value[path] = self._uncached_value[path]

  def Get(self):
    '''Returns a dict mapping each requested path to its patched content.

    Raises:
      FileNotFoundError: if any requested path was deleted by the patch, or
          if the tarball fetch/extraction failed.
    '''
    if self._missing_paths is not None:
      self._GetMissingPaths()

    # Make sure all paths exist before returning.
    for path in self._paths:
      if self._cached_value[path] == _FILE_NOT_FOUND_VALUE:
        raise FileNotFoundError('File %s was deleted in the patch.' % path)
    return self._cached_value
| 113 | |
class RietveldPatcher(Patcher):
  ''' Class to fetch resources from a patchset in Rietveld.
  '''
  def __init__(self,
               base_path,
               issue,
               fetcher,
               object_store_creator_factory):
    self._base_path = base_path
    self._issue = issue
    self._fetcher = fetcher
    self._object_store = object_store_creator_factory.Create(
        RietveldPatcher).Create()
    self._object_store_creator_factory = object_store_creator_factory

  def GetVersion(self):
    # NOTE(review): per the review discussion, PatchedFileSystem appears to
    # treat a None version as "no patch", so this returns the patchset id
    # (or None) rather than raising — confirm against PatchedFileSystem.
    return self._GetPatchset()

  def _GetPatchset(self):
    '''Returns the id (as a string) of the most recent patchset of
    |self._issue|, or None if the issue cannot be used as a patch: its JSON
    cannot be fetched, it is closed, it has no patchsets, or it is not filed
    against a known Chromium repository. The result is cached in the object
    store under '@<issue>'.
    '''
    key = '@%s' % self._issue
    patchset = self._object_store.Get(key).Get()
    if patchset is not None:
      return patchset

    try:
      issue_json = json.loads(self._fetcher.Fetch(
          RIETVELD_ISSUE_JSON % self._issue).content)
    except Exception as e:
      # Best-effort: any fetch/parse failure means "not patchable", but
      # leave a trace in the logs instead of swallowing silently.
      logging.warning('Failed to fetch issue %s: %s', self._issue, e)
      return None

    if issue_json.get('closed'):
      return None

    patchsets = issue_json.get('patchsets')
    if not isinstance(patchsets, list) or len(patchsets) == 0:
      return None

    if issue_json.get('base_url') not in CHROMIUM_REPO_BASEURLS:
      return None

    patchset = str(patchsets[-1])
    self._object_store.Set(key, patchset)
    return patchset

  def GetPatchedFiles(self):
    '''Returns an (added, deleted, modified) triple of doc-related paths
    (relative to |self._base_path|) touched by the latest patchset, or empty
    lists on any fetch/parse failure. The result is cached in the object
    store under '@<issue>.<patchset>'.
    '''
    patchset = self._GetPatchset()
    cache_key = '@%s.%s' % (self._issue, patchset)
    empty = ([], [], [])
    patched_files = self._object_store.Get(cache_key).Get()
    if patched_files is not None:
      return patched_files

    try:
      patchset_json = json.loads(self._fetcher.Fetch(
          RIETVELD_PATCHSET_JSON % (self._issue, patchset)).content)
    except Exception as e:
      logging.warning('Failed to fetch patchset %s of issue %s: %s',
                      patchset, self._issue, e)
      return empty

    files = patchset_json.get('files')
    # isinstance also rejects None, so a single check suffices.
    if not isinstance(files, dict):
      return empty

    added = []
    deleted = []
    modified = []
    for file_path in files:
      # Assumes every path in the patchset lives under |self._base_path| —
      # otherwise split() yields a single element and this raises IndexError.
      f = file_path.split(self._base_path + '/', 1)[1]
      if (f.startswith(svn_constants.DOCS_PATH) or
          f.startswith(svn_constants.API_PATH)):
        status = (files[file_path].get('status') or 'M').strip()
        if status == 'A':
          added.append(f)
        elif status == 'D':
          deleted.append(f)
        else:
          modified.append(f)

    patched_files = (added, deleted, modified)
    # BUG FIX: the original reused the name |key| as the loop variable above,
    # so this Set() cached under the last file path instead of the intended
    # '@<issue>.<patchset>' key; |cache_key| (with a distinct loop variable)
    # restores the intended cache behavior.
    self._object_store.Set(cache_key, patched_files)
    return patched_files

  def Apply(self, paths, file_system, binary=False):
    '''Returns a Future whose Get() yields a dict of path -> patched content
    for |paths|. Cached contents come from the object store; the rest are
    extracted from the patchset tarball by _AsyncFetchFuture.
    '''
    cached_files = self._object_store.GetMulti(paths).Get()
    missing_paths = list(set(paths) - set(cached_files.keys()))
    if len(missing_paths) == 0:
      missing_paths = None
    # Assumes the issue is patchable (patchset is not None) — iterating None
    # below would raise. TODO confirm callers guard via GetVersion().
    patchset = self._GetPatchset()
    # Map digits to letters ('0' -> 'a', ..., '9' -> 'j') to build the object
    # store category name. NOTE(review): presumably category names may not
    # contain digits — verify against the object store implementation.
    category = ''
    for c in patchset:
      if c.isdigit():
        category += 'abcdefghij'[int(c)]
      else:
        category += c
    return Future(delegate=_AsyncFetchFuture(
        self._base_path,
        paths,
        cached_files,
        missing_paths,
        binary,
        self._issue,
        patchset,
        self.GetPatchedFiles(),
        self._fetcher,
        self._object_store_creator_factory.Create(RietveldPatcher).
            Create(category)))
| OLD | NEW |