Chromium Code Reviews

Side by Side Diff: git_common.py

Issue 26109002: Add git-number script to calculate generation numbers for commits. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/depot_tools
Patch Set: Created 7 years, 1 month ago
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Monkeypatch IMapIterator so that Ctrl-C can kill everything properly.
# Derived from https://gist.github.com/aljungberg/626518
import multiprocessing.pool
from multiprocessing.pool import IMapIterator
def wrapper(func):
  def wrap(self, timeout=None):
    return func(self, timeout=timeout or 1e100)
  return wrap
IMapIterator.next = wrapper(IMapIterator.next)
IMapIterator.__next__ = IMapIterator.next
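
# Illustrative sketch (not part of this change): the patch above matters for
# code that loops over pool.imap() results, where next() with no timeout can
# block in a way that masks Ctrl-C. 'work' and 'items' below are hypothetical.
#
#   pool = multiprocessing.pool.ThreadPool(4)
#   try:
#     for result in pool.imap(work, items):
#       pass
#   except KeyboardInterrupt:
#     pool.terminate()
#     raise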


import binascii
import contextlib
import functools
import logging
import signal
import sys
import tempfile
import threading

import subprocess2


GIT_EXE = 'git.bat' if sys.platform.startswith('win') else 'git'


class BadCommitRefException(Exception):
  def __init__(self, refs):
    msg = ('one of %s does not seem to be a valid commitref.' %
           str(refs))
    super(BadCommitRefException, self).__init__(msg)


def memoize_one(**kwargs):
  """Memoizes a single-argument pure function.

  Values of None are not cached.

  Kwargs:
    threadsafe (bool) - REQUIRED. Specifies whether to use locking around
      cache manipulation functions. This is a kwarg so that users of memoize_one
      are forced to explicitly and verbosely pick True or False.

  Adds four methods to the decorated function:
    * get(key, default=None) - Gets the value for this key from the cache.
    * set(key, value) - Sets the value for this key in the cache.
    * clear() - Drops the entire contents of the cache. Useful for unittests.
    * update(other) - Updates the contents of the cache from another dict.
54 """
55 assert 'threadsafe' in kwargs, 'Must specify threadsafe={True,False}'
56 threadsafe = kwargs['threadsafe']
57
58 if threadsafe:
59 def withlock(lock, f):
60 def inner(*args, **kwargs):
61 with lock:
62 return f(*args, **kwargs)
63 return inner
64 else:
65 def withlock(_lock, f):
66 return f
67
68 def decorator(f):
69 # Instantiate the lock in decorator, in case users of memoize_one do:
70 #
71 # memoizer = memoize_one(threadsafe=True)
72 #
73 # @memoizer
74 # def fn1(val): ...
75 #
76 # @memoizer
77 # def fn2(val): ...
78
79 lock = threading.Lock() if threadsafe else None
80 cache = {}
81 _get = withlock(lock, cache.get)
82 _set = withlock(lock, cache.__setitem__)
83
84 @functools.wraps(f)
85 def inner(arg):
86 ret = _get(arg)
87 if ret is None:
88 ret = f(arg)
89 if ret is not None:
90 _set(arg, ret)
91 return ret
92 inner.get = _get
93 inner.set = _set
94 inner.clear = withlock(lock, cache.clear)
95 inner.update = withlock(lock, cache.update)
96 return inner
97 return decorator
98
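# Illustrative sketch (not part of this change; 'resolve_ref' is a
# hypothetical helper): the decorated function caches non-None results,
# keyed on its single argument.
#
#   @memoize_one(threadsafe=False)
#   def resolve_ref(ref):
#     return run('rev-parse', ref)
#
#   resolve_ref('HEAD')   # invokes git
#   resolve_ref('HEAD')   # answered from the cache
#   resolve_ref.clear()   # drops all cached entries (handy in tests)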

def _ScopedPool_initer(orig, orig_args):  # pragma: no cover
  """Initializer method for ScopedPool's subprocesses.

  This helps ScopedPool handle Ctrl-C's correctly.
  """
  signal.signal(signal.SIGINT, signal.SIG_IGN)
  if orig:
    orig(*orig_args)


@contextlib.contextmanager
def ScopedPool(*args, **kwargs):
  """Context manager which yields a multiprocessing.pool instance that
  correctly deals with thrown exceptions.

  *args - Arguments to multiprocessing.pool

  Kwargs:
    kind ('threads', 'procs') - The type of underlying worker pool to use.
    **etc - Arguments to multiprocessing.pool
  """
  if kwargs.pop('kind', None) == 'threads':
    pool = multiprocessing.pool.ThreadPool(*args, **kwargs)
  else:
    orig, orig_args = kwargs.get('initializer'), kwargs.get('initargs', ())
    kwargs['initializer'] = _ScopedPool_initer
    kwargs['initargs'] = orig, orig_args
    pool = multiprocessing.pool.Pool(*args, **kwargs)

  try:
    yield pool
    pool.close()
  except:
    pool.terminate()
    raise
  finally:
    pool.join()

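# Illustrative sketch (not part of this change; 'work' and the input list are
# hypothetical): the pool is closed and joined on normal exit, and terminated
# if the body raises.
#
#   with ScopedPool(kind='threads') as pool:
#     for out in pool.imap(work, ['a', 'b', 'c']):
#       print out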

class ProgressPrinter(object):
  """Threaded single-stat status message printer."""
  def __init__(self, fmt, enabled=None, stream=sys.stderr, period=0.5):
    """Create a ProgressPrinter.

    Use it as a context manager which produces a simple 'increment' method:

      with ProgressPrinter('(%%(count)d/%d)' % 1000) as inc:
        for i in xrange(1000):
          # do stuff
          if i % 10 == 0:
            inc(10)

    Args:
      fmt - String format with a single '%(count)d' where the counter value
        should go.
      enabled (bool) - If this is None, will default to True if
        logging.getLogger() is set to INFO or more verbose.
      stream (file-like) - The stream to print status messages to.
      period (float) - The time in seconds for the printer thread to wait
        between printing.
    """
    self.fmt = fmt
    if enabled is None:  # pragma: no cover
      self.enabled = logging.getLogger().isEnabledFor(logging.INFO)
    else:
      self.enabled = enabled

    self._count = 0
    self._dead = False
    self._dead_cond = threading.Condition()
    self._stream = stream
    self._thread = threading.Thread(target=self._run)
    self._period = period

  def _emit(self, s):
    if self.enabled:
      self._stream.write('\r' + s)
      self._stream.flush()

  def _run(self):
    with self._dead_cond:
      while not self._dead:
        self._emit(self.fmt % {'count': self._count})
        self._dead_cond.wait(self._period)
      self._emit((self.fmt + '\n') % {'count': self._count})

  def inc(self, amount=1):
    self._count += amount

  def __enter__(self):
    self._thread.start()
    return self.inc

  def __exit__(self, _exc_type, _exc_value, _traceback):
    self._dead = True
    with self._dead_cond:
      self._dead_cond.notifyAll()
    self._thread.join()
    del self._thread


def parse_commitrefs(*commitrefs):
  """Returns binary encoded commit hashes for one or more commitrefs.

  A commitref is anything which can resolve to a commit. Popular examples:
    * 'HEAD'
    * 'origin/master'
    * 'cool_branch~2'
  """
  try:
    return map(binascii.unhexlify, hashes(*commitrefs))
  except subprocess2.CalledProcessError:
    raise BadCommitRefException(commitrefs)

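# Illustrative sketch (not part of this change): resolve a couple of refs to
# binary-encoded hashes; an unresolvable ref raises BadCommitRefException.
#
#   head, parent = parse_commitrefs('HEAD', 'HEAD~1')
#   print binascii.hexlify(head)   # the 40-char hex hash of HEAD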

def run(*cmd, **kwargs):
  """Runs a git command. Returns stdout as a string.

  If logging is DEBUG, we'll print the command before we run it.

  Kwargs:
    autostrip (bool) - Strip the output. Defaults to True; when set, the
      output string is strip()'d.
  """
  autostrip = kwargs.pop('autostrip', True)
  cmd = (GIT_EXE,) + cmd
  logging.debug('running: %s', ' '.join(repr(tok) for tok in cmd))
M-A Ruel 2013/11/17 19:54:58 map(repr, cmd) ? (kidding) 'Running ...' though.
iannucci 2013/11/18 05:38:56 Ha. Yeah, I've been warned away from map(), but I
  ret = subprocess2.check_output(cmd, stderr=subprocess2.PIPE, **kwargs)
  if autostrip:
    ret = (ret or '').strip()
  return ret

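# Illustrative sketch (not part of this change): run() prepends GIT_EXE, so
# callers pass only the git subcommand and its arguments.
#
#   branch = run('rev-parse', '--abbrev-ref', 'HEAD')        # e.g. 'master'
#   raw = run('cat-file', 'commit', 'HEAD', autostrip=False)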

def hashes(*reflike):
  return run('rev-parse', *reflike).splitlines()


def intern_f(f, kind='blob'):
  """Interns a file object into the git object store.

  Args:
    f (file-like object) - The file-like object to intern
    kind (git object type) - One of 'blob', 'commit', 'tree', 'tag'.

  Returns the git hash of the interned object (hex encoded).
  """
  ret = run('hash-object', '-t', kind, '-w', '--stdin', stdin=f)
  f.close()
  return ret

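# Illustrative sketch (not part of this change): intern some in-memory data as
# a blob and read it back by hash.
#
#   f = tempfile.TemporaryFile()
#   f.write('hello world\n')
#   f.seek(0)
#   blob_hash = intern_f(f)              # intern_f closes the file for us
#   run('cat-file', 'blob', blob_hash)   # -> 'hello world'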

def tree(treeref, recurse=False):
  """Returns a dict representation of a git tree object.

  Args:
    treeref (str) - a git ref which resolves to a tree (commits count as trees).
    recurse (bool) - include all of the tree's descendants too. File names will
      take the form of 'some/path/to/file'.

  Return format:
    { 'file_name': (mode, type, ref) }

    mode is an integer where:
      * 0040000 - Directory
      * 0100644 - Regular non-executable file
      * 0100664 - Regular non-executable group-writeable file
      * 0100755 - Regular executable file
      * 0120000 - Symbolic link
      * 0160000 - Gitlink

    type is a string: one of 'blob', 'commit', 'tree', 'tag'.

    ref is the hex encoded hash of the entry.
  """
  ret = {}
  opts = ['ls-tree', '--full-tree']
  if recurse:
    opts.append('-r')
  opts.append(treeref)
  try:
    for line in run(*opts).splitlines():
      mode, typ, ref, name = line.split(None, 3)
      ret[name] = (mode, typ, ref)
  except subprocess2.CalledProcessError:
    return None
  return ret

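# Illustrative sketch (not part of this change): list every entry reachable
# from HEAD's tree and keep only the blobs.
#
#   entries = tree('HEAD', recurse=True)
#   blobs = dict((name, ref) for name, (mode, typ, ref) in entries.iteritems()
#                if typ == 'blob')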

def mktree(treedict):
  """Makes a git tree object and returns its hash.

  See |tree()| for the values of mode, type, and ref.

  Args:
    treedict - { name: (mode, type, ref) }
  """
  with tempfile.TemporaryFile() as f:
    for name, (mode, typ, ref) in treedict.iteritems():
      f.write('%s %s %s\t%s\0' % (mode, typ, ref, name))
    f.seek(0)
    return run('mktree', '-z', stdin=f)
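
# Illustrative sketch (not part of this change; the file name and blob hash
# are hypothetical): build a one-entry tree and inspect it.
#
#   tree_hash = mktree({'readme.txt': ('100644', 'blob', blob_hash)})
#   run('ls-tree', tree_hash)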