| Index: chrome/common/extensions/docs/server2/rietveld_patcher.py
|
| ===================================================================
|
| --- chrome/common/extensions/docs/server2/rietveld_patcher.py (revision 0)
|
| +++ chrome/common/extensions/docs/server2/rietveld_patcher.py (revision 0)
|
| @@ -0,0 +1,235 @@
|
| +# Copyright 2013 The Chromium Authors. All rights reserved.
|
| +# Use of this source code is governed by a BSD-style license that can be
|
| +# found in the LICENSE file.
|
| +
|
| +import json
|
| +import logging
|
| +import tarfile
|
| +from datetime import datetime, timedelta
|
| +from StringIO import StringIO
|
| +
|
| +from file_system import FileNotFoundError, ToUnicode
|
| +from future import Future
|
| +from patcher import Patcher
|
| +import svn_constants
|
| +
|
# Use a special value other than None to represent a deleted file in the patch.
# (None is already used to mean "not in the cache", so deleted files need a
# distinct, still-cacheable marker.)
_FILE_NOT_FOUND_VALUE = (None,)

# How long a cached (patchset, fetch-time) entry for an issue stays fresh
# before _GetPatchset re-fetches the issue JSON.
_ISSUE_CACHE_MAXAGE = timedelta(minutes=5)

# Repository base URLs an issue must be uploaded against for its patches to
# be applied; anything else is rejected by _GetPatchset.
_CHROMIUM_REPO_BASEURLS = [
  'https://src.chromium.org/svn/trunk/src/',
  'http://src.chromium.org/svn/trunk/src/',
  'svn://svn.chromium.org/chrome/trunk/src',
  'https://chromium.googlesource.com/chromium/src.git@master',
  'http://git.chromium.org/chromium/src.git@master',
]

# Only patched files under these docs paths are reported by GetPatchedFiles.
_DOCS_PATHS = [
  svn_constants.API_PATH,
  svn_constants.TEMPLATE_PATH,
  svn_constants.STATIC_PATH
]
|
| +
|
| +''' Append @patchset for keys to distinguish between different patchsets of an
|
| +issue.
|
| +'''
|
| +def _MakeKey(path_or_paths, patchset):
|
| + if isinstance(path_or_paths, list) or isinstance(path_or_paths, set):
|
| + return ['%s@%s' % (p, patchset) for p in path_or_paths]
|
| + return _MakeKey([path_or_paths], patchset)[0]
|
| +
|
def _ToObjectStoreValue(value, patchset):
  '''Rewrites a {path: data} dict into a {path@patchset: data} dict suitable
  for storing in the object store.
  '''
  keyed = {}
  for path, data in value.items():
    keyed[_MakeKey(path, patchset)] = data
  return keyed
|
| +
|
| +def _FromObjectStoreValue(value):
|
| + return {key[0:key.find('@')]: value[key] for key in value}
|
| +
|
class _AsyncFetchFuture(object):
  '''Lazily resolves the contents of |paths| for a Rietveld issue/patchset.

  Paths already present in |cached_files| are served from there; the
  remaining |missing_paths| are extracted from the patchset tarball when
  Get() is called, written back to |object_store|, and merged into
  |cached_files|.
  '''
  def __init__(self,
               base_path,
               paths,
               cached_files,
               missing_paths,
               binary,
               issue,
               patchset,
               patched_files,
               fetcher,
               object_store):
    self._base_path = base_path
    self._paths = paths
    self._cached_value = cached_files
    self._missing_paths = missing_paths
    self._binary = binary
    self._issue = issue
    self._patchset = patchset
    # Flatten the (added, deleted, modified) tuple into one list of all
    # paths touched by the patch.
    self._files = []
    for files in patched_files:
      self._files += files
    self._object_store = object_store
    # |missing_paths| is None when everything was cached; only start the
    # (expensive) tarball download when something is actually missing.
    if missing_paths is not None:
      logging.info('Fetching tarball/%s/%s' % (issue, patchset))
      self._tarball = fetcher.FetchAsync('tarball/%s/%s' % (issue, patchset))

  def _GetMissingPaths(self):
    '''Extracts every not-yet-cached patched file from the tarball, caches
    the results in the object store, and merges |self._missing_paths| into
    |self._cached_value|.

    Raises FileNotFoundError (rather than letting an internal error surface
    as a 500) when the tarball cannot be downloaded or read, or when a
    missing path is not part of the patch.
    '''
    tarball_result = self._tarball.Get()
    if tarball_result.status_code != 200:
      raise FileNotFoundError(
          'Failed to download tarball for issue %s patchset %s. Status: %s' %
          (self._issue, self._patchset, tarball_result.status_code))

    try:
      tar = tarfile.open(fileobj=StringIO(tarball_result.content))
    except tarfile.TarError:
      raise FileNotFoundError('Invalid tarball for issue %s patchset %s.' %
                              (self._issue, self._patchset))

    self._uncached_value = {}
    for path in self._files:
      if path in self._cached_value:
        continue

      if self._base_path:
        tar_path = 'b/%s/%s' % (self._base_path, path)
      else:
        tar_path = 'b/%s' % path
      # Pre-bind so the finally clause cannot hit an unbound local (the
      # original raised NameError there if extractfile() itself failed,
      # masking the real error).
      patched_file = None
      try:
        patched_file = tar.extractfile(tar_path)
        # extractfile() returns None for members that are not regular files;
        # treat those like deleted (empty) entries below.
        data = patched_file.read() if patched_file is not None else ''
      # In the unlikely case that the tarball is corrupted, throw
      # FileNotFoundError instead of 500 Internal Server Error. Note that
      # tarfile raises KeyError, not TarError, for a member missing from
      # the archive.
      except (tarfile.TarError, KeyError):
        raise FileNotFoundError(
            'Error extracting tarball for issue %s patchset %s.' %
            (self._issue, self._patchset))
      finally:
        if patched_file is not None:
          patched_file.close()

      # Deleted files still exist in the tarball, but they are empty.
      if len(data) == 0:
        # Mark it empty instead of throwing FileNotFoundError here to make sure
        # self._object_store.SetMulti below is called and all files read are
        # cached.
        self._uncached_value[path] = _FILE_NOT_FOUND_VALUE
      elif self._binary:
        self._uncached_value[path] = data
      else:
        self._uncached_value[path] = ToUnicode(data)

    self._object_store.SetMulti(_ToObjectStoreValue(self._uncached_value,
                                                    self._patchset))

    for path in self._missing_paths:
      if self._uncached_value.get(path) is None:
        raise FileNotFoundError('File %s was not found in the patch.' % path)
      self._cached_value[path] = self._uncached_value[path]

  def Get(self):
    '''Returns {path: content} for all requested paths.

    Raises FileNotFoundError for any path that the patch deletes.
    '''
    if self._missing_paths is not None:
      self._GetMissingPaths()

    # Make sure all paths exist before returning.
    for path in self._paths:
      if self._cached_value[path] == _FILE_NOT_FOUND_VALUE:
        raise FileNotFoundError('File %s was deleted in the patch.' % path)
    return self._cached_value
|
| +
|
class RietveldPatcher(Patcher):
  ''' Class to fetch resources from a patchset in Rietveld.
  '''
  def __init__(self,
               base_path,
               issue,
               fetcher,
               object_store_creator_factory):
    self._base_path = base_path
    self._issue = issue
    self._fetcher = fetcher
    self._object_store = object_store_creator_factory.Create(
        RietveldPatcher).Create()

  def GetVersion(self):
    '''Returns the issue's latest patchset number (as a string) as the patch
    version, or None if it cannot be determined.
    '''
    return self._GetPatchset()

  def _GetPatchset(self):
    '''Returns the latest patchset number of self._issue as a string,
    caching it for _ISSUE_CACHE_MAXAGE.

    Returns None (best effort, never raises) when the issue JSON cannot be
    fetched or parsed, the issue is closed, it has no patchsets, or its
    base_url is not a recognized Chromium repository.
    '''
    value = self._object_store.Get(self._issue).Get()
    if value is not None:
      patchset, time = value
      if datetime.now() - time < _ISSUE_CACHE_MAXAGE:
        return patchset

    try:
      issue_json = json.loads(self._fetcher.Fetch(
          'api/%s' % self._issue).content)
    except Exception:
      # Best effort: an unreachable or malformed issue simply has no
      # usable patchset. Log so failures are not completely silent.
      logging.warning('Failed to fetch or parse issue %s.' % self._issue)
      return None

    if issue_json.get('closed'):
      return None

    patchsets = issue_json.get('patchsets')
    if not isinstance(patchsets, list) or len(patchsets) == 0:
      return None

    if not issue_json.get('base_url') in _CHROMIUM_REPO_BASEURLS:
      return None

    patchset = str(patchsets[-1])
    self._object_store.Set(self._issue, (patchset, datetime.now()))
    return patchset

  def GetPatchedFiles(self):
    '''Returns an (added, deleted, modified) tuple of lists of paths,
    relative to self._base_path, touched by the latest patchset and lying
    under one of _DOCS_PATHS. Results are cached per patchset.
    '''
    patchset = self._GetPatchset()
    object_store_key = '@%s' % patchset
    patched_files = self._object_store.Get(object_store_key).Get()
    if patched_files is not None:
      return patched_files

    try:
      patchset_json = json.loads(self._fetcher.Fetch(
          'api/%s/%s' % (self._issue, patchset)).content)
    except Exception:
      # Best effort: an unfetchable patchset patches nothing.
      logging.warning('Failed to fetch or parse patchset %s of issue %s.' %
                      (patchset, self._issue))
      return ([], [], [])

    files = patchset_json.get('files')
    if files is None or not isinstance(files, dict):
      return ([], [], [])

    added = []
    deleted = []
    modified = []
    prefix = self._base_path + '/'
    for key in files:
      parts = key.split(prefix, 1)
      if len(parts) < 2:
        # The file lies outside self._base_path; skip it instead of raising
        # IndexError (which the unconditional [1] used to do).
        continue
      f = parts[1]
      if any(f.startswith(path) for path in _DOCS_PATHS):
        # A missing/empty status defaults to modified ('M').
        status = (files[key].get('status') or 'M').strip()
        if status == 'A':
          added.append(f)
        elif status == 'D':
          deleted.append(f)
        else:
          modified.append(f)

    patched_files = (added, deleted, modified)
    self._object_store.Set(object_store_key, patched_files)
    return patched_files

  def Apply(self, paths, file_system, binary=False):
    '''Returns a Future resolving to {path: content} for |paths|, serving
    already-cached patched files directly and fetching the rest from the
    patchset tarball. |file_system| is unused by this implementation; it is
    part of the Patcher interface.
    '''
    patchset = self._GetPatchset()
    cached_files = _FromObjectStoreValue(
        self._object_store.GetMulti(_MakeKey(paths, patchset)).Get())
    missing_paths = list(set(paths) - set(cached_files.keys()))
    if len(missing_paths) == 0:
      # Everything was cached; None tells _AsyncFetchFuture to skip the
      # tarball download entirely.
      missing_paths = None
    return Future(delegate=_AsyncFetchFuture(
        self._base_path,
        paths,
        cached_files,
        missing_paths,
        binary,
        self._issue,
        patchset,
        self.GetPatchedFiles(),
        self._fetcher,
        self._object_store))
|
|
|
| Property changes on: chrome/common/extensions/docs/server2/rietveld_patcher.py
|
| ___________________________________________________________________
|
| Added: svn:eol-style
|
| + LF
|
|
|
|
|