| OLD | NEW |
| 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 # These are fake fetchers that are used for testing and the preview server. | 5 # These are fake fetchers that are used for testing and the preview server. |
| 6 # They return canned responses for URLs. appengine_wrappers.py uses the fake | 6 # They return canned responses for URLs. appengine_wrappers.py uses the fake |
| 7 # fetchers if the App Engine imports fail. | 7 # fetchers if the App Engine imports fail. |
| 8 | 8 |
| 9 import os | 9 import os |
| 10 import re | 10 import re |
| 11 import sys | 11 import sys |
| 12 | 12 |
| 13 import appengine_wrappers | 13 import appengine_wrappers |
| 14 from extensions_paths import SERVER2 |
| 15 from test_util import ReadFile, ChromiumPath |
| 14 import url_constants | 16 import url_constants |
| 15 | 17 |
| 16 | 18 |
| 19 # TODO(kalman): Investigate why logging in this class implies that the server |
| 20 # isn't properly caching some fetched files; often it fetches the same file |
| 21 # 10+ times. This may be a test anomaly. |
| 22 |
| 23 |
def _ReadTestData(*path, **read_args):
  '''Reads a canned response file from the server2 test_data tree.

  |path| components are joined under SERVER2/test_data; |read_args| are
  forwarded to ReadFile (e.g. mode='rb').
  '''
  full_path = (SERVER2, 'test_data') + path
  return ReadFile(*full_path, **read_args)
| 26 |
| 27 |
| 17 class _FakeFetcher(object): | 28 class _FakeFetcher(object): |
| 18 def __init__(self, base_path): | |
| 19 self._base_path = base_path | |
| 20 | |
| 21 def _ReadFile(self, path, mode='rb'): | |
| 22 with open(os.path.join(self._base_path, path), mode) as f: | |
| 23 return f.read() | |
| 24 | |
| 25 def _ListDir(self, path): | 29 def _ListDir(self, path): |
| 26 return os.listdir(os.path.join(self._base_path, path)) | 30 return os.listdir(path) |
| 27 | 31 |
| 28 def _IsDir(self, path): | 32 def _IsDir(self, path): |
| 29 return os.path.isdir(os.path.join(self._base_path, path)) | 33 return os.path.isdir(path) |
| 30 | 34 |
| 31 def _Stat(self, path): | 35 def _Stat(self, path): |
| 32 return int(os.stat(os.path.join(self._base_path, path)).st_mtime) | 36 return int(os.stat(path).st_mtime) |
| 33 | 37 |
| 34 | 38 |
class _FakeOmahaProxy(_FakeFetcher):
  def fetch(self, url):
    '''Returns the canned branch utility data regardless of |url|.'''
    canned = ('branch_utility', 'first.json')
    return _ReadTestData(*canned)
| 41 | 42 |
| 42 | 43 |
class _FakeOmahaHistory(_FakeFetcher):
  def fetch(self, url):
    '''Returns the canned Omaha history data regardless of |url|.'''
    canned = ('branch_utility', 'second.json')
    return _ReadTestData(*canned)
| 49 | 47 |
| 50 | 48 |
| 51 class FakeSubversionServer(_FakeFetcher): | 49 _SVN_URL_TO_PATH_PATTERN = re.compile( |
| 52 def __init__(self, base_path): | 50 r'^.*chrome/.*(trunk|branches/.*)/src/?([^?]*).*?') |
| 53 _FakeFetcher.__init__(self, base_path) | 51 def _ExtractPathFromSvnUrl(url): |
| 54 self._base_pattern = re.compile(r'.*chrome/common/extensions/(.*)') | 52 return _SVN_URL_TO_PATH_PATTERN.match(url).group(2) |
| 55 | 53 |
| 54 |
class _FakeSubversionServer(_FakeFetcher):
  def fetch(self, url):
    '''Fakes an SVN web server response for |url|.

    Directories yield a minimal HTML listing (one <a> per non-hidden entry,
    trailing '/' on subdirectories); files yield their raw contents.
    Returns None if the mapped local path cannot be read.
    '''
    path = ChromiumPath(_ExtractPathFromSvnUrl(url))
    if self._IsDir(path):
      html = ['<html>Revision 000000']
      try:
        for f in self._ListDir(path):
          if f.startswith('.'):
            continue
          if self._IsDir(os.path.join(path, f)):
            html.append('<a>' + f + '/</a>')
          else:
            html.append('<a>' + f + '</a>')
        html.append('</html>')
        return '\n'.join(html)
      # Fixed: exception was bound to an unused name (|e|); match the bare
      # except style used for IOError below.
      except OSError:
        return None
    try:
      return ReadFile(path)
    except IOError:
      return None
| 77 | 76 |
| 78 | 77 |
class _FakeViewvcServer(_FakeFetcher):
  def fetch(self, url):
    '''Fakes a viewvc response for |url|.

    Files return their raw contents (None on read failure); directories
    return an HTML table listing each non-hidden entry with its stat-based
    revision number.
    '''
    path = ChromiumPath(_ExtractPathFromSvnUrl(url))
    if not self._IsDir(path):
      try:
        return ReadFile(path)
      except IOError:
        return None
    html = ['<table><tbody><tr>...</tr>']
    # The version of the directory.
    html.append('<tr>')
    html.append('<td>Directory revision:</td>')
    html.append('<td><a>%s</a><a></a></td>' % self._Stat(path))
    html.append('</tr>')
    # The version of each file.
    for name in self._ListDir(path):
      if name.startswith('.'):
        continue
      child = os.path.join(path, name)
      suffix = '/' if self._IsDir(child) else ''
      html.append('<tr>')
      html.append(' <td><a>%s%s</a></td>' % (name, suffix))
      html.append(' <td><a><strong>%s</strong></a></td>' % self._Stat(child))
      html.append('<td></td><td></td><td></td>')
      html.append('</tr>')
    html.append('</tbody></table>')
    return '\n'.join(html)
| 112 | 106 |
| 113 | 107 |
class _FakeGithubStat(_FakeFetcher):
  def fetch(self, url):
    '''Returns a stub GitHub commit JSON with a fixed tree sha of 0.'''
    stub_commit_json = '{ "commit": { "tree": { "sha": 0} } }'
    return stub_commit_json
| 117 | 111 |
| 118 | 112 |
class _FakeGithubZip(_FakeFetcher):
  def fetch(self, url):
    '''Returns the canned samples zip bytes regardless of |url|.'''
    canned = ('github_file_system', 'apps_samples.zip')
    return _ReadTestData(*canned, mode='rb')
| 129 | 116 |
| 130 | 117 |
class _FakeRietveldAPI(_FakeFetcher):
  def __init__(self):
    # Captures the 'api/...' suffix of a Rietveld API URL.
    self._base_pattern = re.compile(r'.*/(api/.*)')

  def fetch(self, url):
    '''Returns the canned Rietveld API response for |url|.'''
    api_suffix = self._base_pattern.match(url).group(1)
    return _ReadTestData('rietveld_patcher', api_suffix, 'json')
| 146 | 125 |
| 147 | 126 |
class _FakeRietveldTarball(_FakeFetcher):
  def __init__(self):
    # Captures the 'tarball/<issue>/<patchset>' suffix of a Rietveld URL.
    self._base_pattern = re.compile(r'.*/(tarball/\d+/\d+)')

  def fetch(self, url):
    '''Returns the canned Rietveld tarball bytes for |url|.'''
    tarball_name = self._base_pattern.match(url).group(1) + '.tar.bz2'
    return _ReadTestData('rietveld_patcher', tarball_name, mode='rb')
| 162 | 135 |
| 163 | 136 |
def ConfigureFakeFetchers():
  '''Registers a canned fake fetcher for each external URL pattern, so the
  preview server and tests never hit the network.

  (Docstring updated: the old text referred to paths "relative to the docs
  directory", but the fetchers no longer take a base path.)
  '''
  url_to_fetcher = {
      url_constants.OMAHA_PROXY_URL: _FakeOmahaProxy(),
      re.escape(url_constants.OMAHA_DEV_HISTORY): _FakeOmahaHistory(),
      '%s/.*' % url_constants.SVN_URL: _FakeSubversionServer(),
      '%s/.*' % url_constants.VIEWVC_URL: _FakeViewvcServer(),
      '%s/.*/commits/.*' % url_constants.GITHUB_REPOS: _FakeGithubStat(),
      '%s/.*/zipball' % url_constants.GITHUB_REPOS: _FakeGithubZip(),
      '%s/api/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldAPI(),
      '%s/tarball/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldTarball(),
  }
  appengine_wrappers.ConfigureFakeUrlFetch(url_to_fetcher)
| OLD | NEW |