Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(4556)

Unified Diff: chrome/common/extensions/docs/server2/rietveld_patcher.py

Issue 14125010: Docserver: Add support for viewing docs with a codereview patch applied (Closed) Base URL: https://src.chromium.org/svn/trunk/src/
Patch Set: Created 7 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: chrome/common/extensions/docs/server2/rietveld_patcher.py
===================================================================
--- chrome/common/extensions/docs/server2/rietveld_patcher.py (revision 0)
+++ chrome/common/extensions/docs/server2/rietveld_patcher.py (revision 0)
@@ -0,0 +1,289 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import tarfile
+from datetime import datetime, timedelta
+from StringIO import StringIO
+
+from file_system import FileNotFoundError, ToUnicode
+from future import Future
+from patcher import Patcher
+import svn_constants
+
# Use a special value other than None to represent a deleted file in the patch.
# A one-element tuple is used because a plain None in the cache would be
# indistinguishable from "path not cached yet".
_FILE_NOT_FOUND_VALUE = (None,)

# How long cached per-issue data (latest patchset number) stays fresh before
# it is re-fetched from Rietveld.
_ISSUE_CACHE_MAXAGE = timedelta(seconds=5)

# Known base URLs of the Chromium repository; issues based on any other
# repository are rejected (see RietveldPatcher._FetchPatchset).
_CHROMIUM_REPO_BASEURLS = [
  'https://src.chromium.org/svn/trunk/src/',
  'http://src.chromium.org/svn/trunk/src/',
  'svn://svn.chromium.org/chrome/trunk/src',
  'https://chromium.googlesource.com/chromium/src.git@master',
  'http://git.chromium.org/chromium/src.git@master',
]

# Only files under these docserver paths are considered part of a patch.
_DOCS_PATHS = [
  svn_constants.API_PATH,
  svn_constants.TEMPLATE_PATH,
  svn_constants.STATIC_PATH
]
+
+def _HandleBinary(data, binary):
+ return data if binary else ToUnicode(data)
+
class RietveldPatcherError(Exception):
  '''Raised when Rietveld returns an error or a malformed response.'''
  def __init__(self, message):
    # Forward the message to Exception so that str(e), e.args, and logging
    # frameworks all see it; the original code skipped this, making str(e)
    # come back empty.
    super(RietveldPatcherError, self).__init__(message)
    self.message = message
+
class _AsyncUncachedFuture(object):
  '''Future that completes a partially cached request: waits on
  |fetch_delegate| for the files missing from the cache, writes the fetched
  data back through |cache|, and merges it into |cached_value|.
  '''
  def __init__(self,
               cache,
               version,
               paths,
               binary,
               cached_value,
               missing_paths,
               fetch_delegate):
    self._cache = cache
    self._version = version
    self._paths = paths
    self._binary = binary
    self._cached_value = cached_value
    self._missing_paths = missing_paths
    self._fetch_delegate = fetch_delegate

  def Get(self):
    fetched = self._fetch_delegate.Get()
    # Store the raw fetch result so subsequent requests hit the cache.
    self._cache.CacheFiles(fetched, self._version)

    for missing in self._missing_paths:
      data = fetched.get(missing)
      if data is None:
        raise FileNotFoundError('File %s was not found in the patch.' %
                                missing)
      self._cached_value[missing] = _HandleBinary(data, self._binary)

    # Make sure all paths exist before returning. A path mapped to the
    # deleted-file sentinel is cached but absent from the patched tree.
    for requested in self._paths:
      if self._cached_value[requested] == _FILE_NOT_FOUND_VALUE:
        raise FileNotFoundError('File %s was deleted in the patch.' %
                                requested)
    return self._cached_value
+
class _AsyncFetchFuture(object):
  '''Future that downloads the patchset tarball from Rietveld and extracts
  the patched ('b/' side) versions of the requested files from it.

  Deleted files are represented by _FILE_NOT_FOUND_VALUE rather than by
  raising, so that a complete result dict can still be returned and cached.
  '''
  def __init__(self,
               base_path,
               issue,
               patchset,
               patched_files,
               paths_to_skip,
               fetcher):
    self._base_path = base_path
    self._issue = issue
    self._patchset = patchset
    # |patched_files| is the (added, deleted, modified) triple; flatten it
    # and drop the paths the caller already has cached.
    self._files = set()
    for files in patched_files:
      self._files |= set(files) - set(paths_to_skip)
    # Kick off the download immediately; Get() blocks on the result.
    self._tarball = fetcher.FetchAsync('tarball/%s/%s' % (issue, patchset))

  def Get(self):
    '''Returns a dict mapping each requested path to its patched content,
    or to _FILE_NOT_FOUND_VALUE for files the patch deletes.

    Raises RietveldPatcherError if the tarball cannot be downloaded, parsed,
    or is missing an expected member.
    '''
    tarball_result = self._tarball.Get()
    if tarball_result.status_code != 200:
      raise RietveldPatcherError(
          'Failed to download tarball for issue %s patchset %s. Status: %s' %
          (self._issue, self._patchset, tarball_result.status_code))

    try:
      tar = tarfile.open(fileobj=StringIO(tarball_result.content))
    except tarfile.TarError:
      raise RietveldPatcherError('Invalid tarball for issue %s patchset %s.' %
                                 (self._issue, self._patchset))

    self._value = {}
    for path in self._files:
      if self._base_path:
        tar_path = 'b/%s/%s' % (self._base_path, path)
      else:
        tar_path = 'b/%s' % path

      patched_file = None
      try:
        patched_file = tar.extractfile(tar_path)
        data = patched_file.read()
      except (tarfile.TarError, KeyError, AttributeError):
        # TarError: the tarball is corrupted (unlikely).
        # KeyError: extractfile raises it when |tar_path| is not a member.
        # AttributeError: extractfile returns None for non-regular members,
        # so .read() fails.
        raise RietveldPatcherError(
            'Error extracting tarball for issue %s patchset %s file %s.' %
            (self._issue, self._patchset, tar_path))
      finally:
        if patched_file:
          patched_file.close()

      # Deleted files still exist in the tarball, but they are empty.
      if len(data) == 0:
        # Mark it empty instead of throwing FileNotFoundError here to make
        # sure this method completes and returns values to cache.
        self._value[path] = _FILE_NOT_FOUND_VALUE
      else:
        self._value[path] = data

    return self._value
+
class RietveldPatcher(Patcher):
  '''Class to fetch resources from a patchset in Rietveld.
  '''
  class _Cache(object):
    '''Caching layer for Rietveld data: an issue store holds the latest
    patchset number and per-patchset file lists; a file store holds patched
    file contents keyed by path@patchset.
    '''
    def __init__(self, object_store_creator_factory):
      self._issue_object_store = object_store_creator_factory.Create(
          RietveldPatcher).Create(category='issue')
      self._file_object_store = object_store_creator_factory.Create(
          RietveldPatcher).Create(category='file')

    def GetPatchset(self, fetch_function):
      '''Returns the latest patchset number, re-fetching it via
      |fetch_function| once the cached value is older than
      _ISSUE_CACHE_MAXAGE.
      '''
      key = 'patchset'
      value = self._issue_object_store.Get(key).Get()
      if value is not None:
        patchset, time = value
        if datetime.now() - time < _ISSUE_CACHE_MAXAGE:
          return patchset

      patchset = fetch_function()
      self._issue_object_store.Set(key, (patchset, datetime.now()))
      return patchset

    def GetPatchedFiles(self, version, fetch_function):
      '''Returns the (added, deleted, modified) lists for patchset |version|,
      fetching them via |fetch_function| on a cache miss. Patchsets are
      immutable, so cached values never expire.
      '''
      value = self._issue_object_store.Get(version).Get()
      if value is not None:
        return value

      value = fetch_function()
      self._issue_object_store.Set(version, value)
      return value

    def _MakeKey(self, path_or_paths, version):
      '''Appends @|version| to keys to distinguish between different
      patchsets of an issue.
      '''
      if isinstance(path_or_paths, (list, set)):
        return ['%s@%s' % (p, version) for p in path_or_paths]
      return self._MakeKey([path_or_paths], version)[0]

    def _ToObjectStoreValue(self, raw_value, version):
      '''Rewrites |raw_value|'s keys to versioned form for storage.'''
      return {self._MakeKey(key, version): raw_value[key] for key in raw_value}

    def _FromObjectStoreValue(self, raw_value, binary):
      '''Strips the @version suffix from stored keys and decodes values.'''
      return {key[0:key.find('@')]: _HandleBinary(raw_value[key], binary)
              for key in raw_value}

    def Apply(self, version, paths, binary, fetch_future_function):
      '''Returns a Future of a dict mapping each of |paths| to its patched
      content. Paths already in the file store resolve immediately; the rest
      are fetched asynchronously via |fetch_future_function|.
      '''
      cached_value = self._FromObjectStoreValue(
          self._file_object_store.GetMulti(
              self._MakeKey(paths, version)).Get(),
          binary)
      missing_paths = list(set(paths) - set(cached_value.keys()))
      if len(missing_paths) == 0:
        return Future(value=cached_value)

      return _AsyncUncachedFuture(self,
                                  version,
                                  paths,
                                  binary,
                                  cached_value,
                                  missing_paths,
                                  fetch_future_function(cached_value.keys()))

    def CacheFiles(self, uncached_raw_value, version):
      '''Writes freshly fetched raw file data to the file store.'''
      self._file_object_store.SetMulti(self._ToObjectStoreValue(
          uncached_raw_value, version))

  def __init__(self,
               base_path,
               issue,
               fetcher,
               object_store_creator_factory):
    self._base_path = base_path
    self._issue = issue
    self._fetcher = fetcher
    self._object_store = object_store_creator_factory.Create(
        RietveldPatcher).Create()
    self._cache = RietveldPatcher._Cache(object_store_creator_factory)

  def GetVersion(self):
    '''Returns the latest patchset number (as a string), which serves as the
    patch version.
    '''
    return self._GetPatchset()

  def _GetPatchset(self):
    return self._cache.GetPatchset(self._FetchPatchset)

  def _FetchPatchset(self):
    '''Fetches the issue JSON from Rietveld and returns the latest patchset
    number as a string.

    Raises RietveldPatcherError if the issue cannot be fetched or parsed, is
    closed, or is not based on a known Chromium repository.
    '''
    try:
      issue_json = json.loads(self._fetcher.Fetch(
          'api/%s' % self._issue).content)
    except Exception:
      raise RietveldPatcherError(
          'Failed to fetch information for issue %s.' % self._issue)

    if issue_json.get('closed'):
      raise RietveldPatcherError('Issue %s has been closed.' % self._issue)

    patchsets = issue_json.get('patchsets')
    if not isinstance(patchsets, list) or len(patchsets) == 0:
      raise RietveldPatcherError('Cannot parse issue %s.' % self._issue)

    if not issue_json.get('base_url') in _CHROMIUM_REPO_BASEURLS:
      raise RietveldPatcherError('Issue %s\'s base url is unknown.' %
                                 self._issue)

    return str(patchsets[-1])

  def GetPatchedFiles(self):
    '''Returns the (added, deleted, modified) lists of docs paths touched by
    the latest patchset, cached per patchset.
    '''
    return self._cache.GetPatchedFiles(self.GetVersion(),
                                       self._FetchPatchedFiles)

  def _FetchPatchedFiles(self):
    '''Fetches the patchset JSON from Rietveld and classifies its files
    under _DOCS_PATHS into (added, deleted, modified) lists.

    Raises RietveldPatcherError if the patchset cannot be fetched or parsed.
    '''
    patchset = self.GetVersion()
    try:
      patchset_json = json.loads(self._fetcher.Fetch(
          'api/%s/%s' % (self._issue, patchset)).content)
    except Exception:
      raise RietveldPatcherError(
          'Failed to fetch details for issue %s patchset %s.' % (self._issue,
                                                                 patchset))

    files = patchset_json.get('files')
    # isinstance is False for None, so a separate None check is redundant.
    if not isinstance(files, dict):
      raise RietveldPatcherError('Failed to parse issue %s patchset %s.' %
                                 (self._issue, patchset))

    added = []
    deleted = []
    modified = []
    for key in files:
      # Keys are repository-relative; strip the docserver base path.
      f = key.split(self._base_path + '/', 1)[1]
      if any(f.startswith(path) for path in _DOCS_PATHS):
        status = (files[key].get('status') or 'M')
        # status can be 'A ' or 'A + '
        if 'A' in status:
          added.append(f)
        elif 'D' in status:
          deleted.append(f)
        else:
          modified.append(f)

    return (added, deleted, modified)

  def Apply(self, paths, file_system, binary=False):
    '''Returns a Future of a dict mapping each of |paths| to its patched
    content. Raises FileNotFoundError up front for any path the patch
    deletes. |file_system| is unused here (part of the Patcher interface).
    '''
    _, deleted, _ = self.GetPatchedFiles()
    deleted_requested = set(deleted) & set(paths)
    if deleted_requested:
      raise FileNotFoundError('File(s) %s are removed in the patch.' %
                              list(deleted_requested))

    return self._cache.Apply(self.GetVersion(), paths, binary,
                             self._CreateFetchFuture)

  def _CreateFetchFuture(self, paths_to_skip):
    # Fetch everything the patch touches except |paths_to_skip| so the whole
    # patchset lands in the cache from one tarball download.
    return Future(delegate=_AsyncFetchFuture(self._base_path,
                                             self._issue,
                                             self._GetPatchset(),
                                             self.GetPatchedFiles(),
                                             paths_to_skip,
                                             self._fetcher))
Property changes on: chrome/common/extensions/docs/server2/rietveld_patcher.py
___________________________________________________________________
Added: svn:eol-style
+ LF

Powered by Google App Engine
This is Rietveld 408576698