OLD | NEW |
(Empty) | |
| 1 import cython |
| 2 from Cython import __version__ |
| 3 |
| 4 import re, os, sys, time |
| 5 try: |
| 6 from glob import iglob |
| 7 except ImportError: |
| 8 # Py2.4 |
| 9 from glob import glob as iglob |
| 10 |
| 11 try: |
| 12 import gzip |
| 13 gzip_open = gzip.open |
| 14 gzip_ext = '.gz' |
| 15 except ImportError: |
| 16 gzip_open = open |
| 17 gzip_ext = '' |
| 18 import shutil |
| 19 import subprocess |
| 20 |
| 21 try: |
| 22 import hashlib |
| 23 except ImportError: |
| 24 import md5 as hashlib |
| 25 |
| 26 try: |
| 27 from io import open as io_open |
| 28 except ImportError: |
| 29 from codecs import open as io_open |
| 30 |
| 31 try: |
| 32 from os.path import relpath as _relpath |
| 33 except ImportError: |
| 34 # Py<2.6 |
| 35 def _relpath(path, start=os.path.curdir): |
| 36 if not path: |
| 37 raise ValueError("no path specified") |
| 38 start_list = os.path.abspath(start).split(os.path.sep) |
| 39 path_list = os.path.abspath(path).split(os.path.sep) |
| 40 i = len(os.path.commonprefix([start_list, path_list])) |
| 41 rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:] |
| 42 if not rel_list: |
| 43 return os.path.curdir |
| 44 return os.path.join(*rel_list) |
| 45 |
| 46 |
| 47 from distutils.extension import Extension |
| 48 |
| 49 from Cython import Utils |
| 50 from Cython.Utils import cached_function, cached_method, path_exists, find_root_package_dir |
| 51 from Cython.Compiler.Main import Context, CompilationOptions, default_options |
| 52 |
| 53 join_path = cached_function(os.path.join) |
| 54 |
| 55 if sys.version_info[0] < 3: |
| 56 # stupid Py2 distutils enforces str type in list of sources |
| 57 _fs_encoding = sys.getfilesystemencoding() |
| 58 if _fs_encoding is None: |
| 59 _fs_encoding = sys.getdefaultencoding() |
| 60 def encode_filename_in_py2(filename): |
| 61 if isinstance(filename, unicode): |
| 62 return filename.encode(_fs_encoding) |
| 63 return filename |
| 64 else: |
| 65 def encode_filename_in_py2(filename): |
| 66 return filename |
| 67 basestring = str |
| 68 |
| 69 def extended_iglob(pattern): |
| 70 if '**/' in pattern: |
| 71 seen = set() |
| 72 first, rest = pattern.split('**/', 1) |
| 73 if first: |
| 74 first = iglob(first+'/') |
| 75 else: |
| 76 first = [''] |
| 77 for root in first: |
| 78 for path in extended_iglob(join_path(root, rest)): |
| 79 if path not in seen: |
| 80 seen.add(path) |
| 81 yield path |
| 82 for path in extended_iglob(join_path(root, '*', '**/' + rest)): |
| 83 if path not in seen: |
| 84 seen.add(path) |
| 85 yield path |
| 86 else: |
| 87 for path in iglob(pattern): |
| 88 yield path |
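|  |
| # Illustrative behaviour of extended_iglob (paths invented): a pattern such |
| # as "src/**/*.pyx" matches at any directory depth, e.g. both "src/a.pyx" |
| # and "src/pkg/sub/b.pyx", and the 'seen' set keeps the overlapping |
| # recursive globs from yielding the same path twice. |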
| 89 |
| 90 @cached_function |
| 91 def file_hash(filename): |
| 92 path = os.path.normpath(filename.encode("UTF-8")) |
| 93 m = hashlib.md5(str(len(path)) + ":") |
| 94 m.update(path) |
| 95 f = open(filename, 'rb') |
| 96 try: |
| 97 data = f.read(65000) |
| 98 while data: |
| 99 m.update(data) |
| 100 data = f.read(65000) |
| 101 finally: |
| 102 f.close() |
| 103 return m.hexdigest() |
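|  |
| # Note: the digest above covers the length-prefixed path as well as the file |
| # contents, so identical files at different locations hash differently. |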
| 104 |
| 105 def parse_list(s): |
| 106 """ |
| 107 >>> parse_list("a b c") |
| 108 ['a', 'b', 'c'] |
| 109 >>> parse_list("[a, b, c]") |
| 110 ['a', 'b', 'c'] |
| 111 >>> parse_list('a " " b') |
| 112 ['a', ' ', 'b'] |
| 113 >>> parse_list('[a, ",a", "a,", ",", ]') |
| 114 ['a', ',a', 'a,', ','] |
| 115 """ |
| 116 if s[0] == '[' and s[-1] == ']': |
| 117 s = s[1:-1] |
| 118 delimiter = ',' |
| 119 else: |
| 120 delimiter = ' ' |
| 121 s, literals = strip_string_literals(s) |
| 122 def unquote(literal): |
| 123 literal = literal.strip() |
| 124 if literal[0] in "'\"": |
| 125 return literals[literal[1:-1]] |
| 126 else: |
| 127 return literal |
| 128 return [unquote(item) for item in s.split(delimiter) if item.strip()] |
| 129 |
| 130 transitive_str = object() |
| 131 transitive_list = object() |
| 132 |
| 133 distutils_settings = { |
| 134 'name': str, |
| 135 'sources': list, |
| 136 'define_macros': list, |
| 137 'undef_macros': list, |
| 138 'libraries': transitive_list, |
| 139 'library_dirs': transitive_list, |
| 140 'runtime_library_dirs': transitive_list, |
| 141 'include_dirs': transitive_list, |
| 142 'extra_objects': list, |
| 143 'extra_compile_args': transitive_list, |
| 144 'extra_link_args': transitive_list, |
| 145 'export_symbols': list, |
| 146 'depends': transitive_list, |
| 147 'language': transitive_str, |
| 148 } |
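|  |
| # These keys are read from "magic" comments at the top of a source file, as |
| # parsed by DistutilsInfo below. A sketch of the format (values invented): |
| # |
| #     # distutils: language = c++ |
| #     # distutils: libraries = spam eggs |
| #     # distutils: include_dirs = [/usr/include/spam, ./include] |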
| 149 |
| 150 @cython.locals(start=long, end=long) |
| 151 def line_iter(source): |
| 152 if isinstance(source, basestring): |
| 153 start = 0 |
| 154 while True: |
| 155 end = source.find('\n', start) |
| 156 if end == -1: |
| 157 yield source[start:] |
| 158 return |
| 159 yield source[start:end] |
| 160 start = end+1 |
| 161 else: |
| 162 for line in source: |
| 163 yield line |
| 164 |
| 165 class DistutilsInfo(object): |
| 166 |
| 167 def __init__(self, source=None, exn=None): |
| 168 self.values = {} |
| 169 if source is not None: |
| 170 for line in line_iter(source): |
| 171 line = line.strip() |
| 172 if line != '' and line[0] != '#': |
| 173 break |
| 174 line = line[1:].strip() |
| 175 if line[:10] == 'distutils:': |
| 176 line = line[10:] |
| 177 ix = line.index('=') |
| 178 key = str(line[:ix].strip()) |
| 179 value = line[ix+1:].strip() |
| 180 type = distutils_settings[key] |
| 181 if type in (list, transitive_list): |
| 182 value = parse_list(value) |
| 183 if key == 'define_macros': |
| 184 value = [tuple(macro.split('=')) for macro in value] |
| 185 self.values[key] = value |
| 186 elif exn is not None: |
| 187 for key in distutils_settings: |
| 188 if key in ('name', 'sources'): |
| 189 continue |
| 190 value = getattr(exn, key, None) |
| 191 if value: |
| 192 self.values[key] = value |
| 193 |
| 194 def merge(self, other): |
| 195 if other is None: |
| 196 return self |
| 197 for key, value in other.values.items(): |
| 198 type = distutils_settings[key] |
| 199 if type is transitive_str and key not in self.values: |
| 200 self.values[key] = value |
| 201 elif type is transitive_list: |
| 202 if key in self.values: |
| 203 all = self.values[key] |
| 204 for v in value: |
| 205 if v not in all: |
| 206 all.append(v) |
| 207 else: |
| 208 self.values[key] = value |
| 209 return self |
| 210 |
| 211 def subs(self, aliases): |
| 212 if aliases is None: |
| 213 return self |
| 214 resolved = DistutilsInfo() |
| 215 for key, value in self.values.items(): |
| 216 type = distutils_settings[key] |
| 217 if type in [list, transitive_list]: |
| 218 new_value_list = [] |
| 219 for v in value: |
| 220 if v in aliases: |
| 221 v = aliases[v] |
| 222 if isinstance(v, list): |
| 223 new_value_list += v |
| 224 else: |
| 225 new_value_list.append(v) |
| 226 value = new_value_list |
| 227 else: |
| 228 if value in aliases: |
| 229 value = aliases[value] |
| 230 resolved.values[key] = value |
| 231 return resolved |
| 232 |
| 233 def apply(self, extension): |
| 234 for key, value in self.values.items(): |
| 235 type = distutils_settings[key] |
| 236 if type in [list, transitive_list]: |
| 237 getattr(extension, key).extend(value) |
| 238 else: |
| 239 setattr(extension, key, value) |
| 240 |
| 241 @cython.locals(start=long, q=long, single_q=long, double_q=long, hash_mark=long, |
| 242 end=long, k=long, counter=long, quote_len=long) |
| 243 def strip_string_literals(code, prefix='__Pyx_L'): |
| 244 """ |
| 245     Normalizes every string literal (and comment body) to a label of the |
| 246     form '__Pyx_L<n>_', returning the normalized code and a mapping of |
| 247     labels to the original strings. |
| 248 """ |
| 249 new_code = [] |
| 250 literals = {} |
| 251 counter = 0 |
| 252 start = q = 0 |
| 253 in_quote = False |
| 254 hash_mark = single_q = double_q = -1 |
| 255 code_len = len(code) |
| 256 |
| 257 while True: |
| 258 if hash_mark < q: |
| 259 hash_mark = code.find('#', q) |
| 260 if single_q < q: |
| 261 single_q = code.find("'", q) |
| 262 if double_q < q: |
| 263 double_q = code.find('"', q) |
| 264 q = min(single_q, double_q) |
| 265 if q == -1: q = max(single_q, double_q) |
| 266 |
| 267 # We're done. |
| 268 if q == -1 and hash_mark == -1: |
| 269 new_code.append(code[start:]) |
| 270 break |
| 271 |
| 272 # Try to close the quote. |
| 273 elif in_quote: |
| 274 if code[q-1] == u'\\': |
| 275 k = 2 |
| 276 while q >= k and code[q-k] == u'\\': |
| 277 k += 1 |
| 278 if k % 2 == 0: |
| 279 q += 1 |
| 280 continue |
| 281             if code[q] == quote_type and (quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])): |
| 282 counter += 1 |
| 283 label = "%s%s_" % (prefix, counter) |
| 284 literals[label] = code[start+quote_len:q] |
| 285 full_quote = code[q:q+quote_len] |
| 286 new_code.append(full_quote) |
| 287 new_code.append(label) |
| 288 new_code.append(full_quote) |
| 289 q += quote_len |
| 290 in_quote = False |
| 291 start = q |
| 292 else: |
| 293 q += 1 |
| 294 |
| 295 # Process comment. |
| 296 elif -1 != hash_mark and (hash_mark < q or q == -1): |
| 297 new_code.append(code[start:hash_mark+1]) |
| 298 end = code.find('\n', hash_mark) |
| 299 counter += 1 |
| 300 label = "%s%s_" % (prefix, counter) |
| 301 if end == -1: |
| 302 end_or_none = None |
| 303 else: |
| 304 end_or_none = end |
| 305 literals[label] = code[hash_mark+1:end_or_none] |
| 306 new_code.append(label) |
| 307 if end == -1: |
| 308 break |
| 309 start = q = end |
| 310 |
| 311 # Open the quote. |
| 312 else: |
| 313 if code_len >= q+3 and (code[q] == code[q+1] == code[q+2]): |
| 314 quote_len = 3 |
| 315 else: |
| 316 quote_len = 1 |
| 317 in_quote = True |
| 318 quote_type = code[q] |
| 319 new_code.append(code[start:q]) |
| 320 start = q |
| 321 q += quote_len |
| 322 |
| 323 return "".join(new_code), literals |
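|  |
| # For illustration, a small input and its normalized form: |
| # |
| #     >>> strip_string_literals("x = 'hello'  # note") |
| #     ("x = '__Pyx_L1_'  #__Pyx_L2_", {'__Pyx_L1_': 'hello', '__Pyx_L2_': ' note'}) |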
| 324 |
| 325 |
| 326 dependency_regex = re.compile(r"(?:^from +([0-9a-zA-Z_.]+) +cimport)|" |
| 327 r"(?:^cimport +([0-9a-zA-Z_.]+)\b)|" |
| 328 r"(?:^cdef +extern +from +['\"]([^'\"]+)['\"])|" |
| 329 r"(?:^include +['\"]([^'\"]+)['\"])", re.M) |
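|  |
| # Illustrative matches for the four alternatives (names invented). By the |
| # time this regex runs, quoted file names have already been replaced by |
| # __Pyx_L labels, which parse_dependencies() maps back through 'literals': |
| #     from spam.ham cimport eggs   -> group 1 = "spam.ham" |
| #     cimport spam                 -> group 2 = "spam" |
| #     cdef extern from "spam.h":   -> group 3 = the label for "spam.h" |
| #     include "helpers.pxi"        -> group 4 = the label for "helpers.pxi" |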
| 330 |
| 331 def normalize_existing(base_path, rel_paths): |
| 332     return normalize_existing0(os.path.dirname(base_path), tuple(set(rel_paths))) |
| 333 |
| 334 @cached_function |
| 335 def normalize_existing0(base_dir, rel_paths): |
| 336 normalized = [] |
| 337 for rel in rel_paths: |
| 338 path = join_path(base_dir, rel) |
| 339 if path_exists(path): |
| 340 normalized.append(os.path.normpath(path)) |
| 341 else: |
| 342 normalized.append(rel) |
| 343 return normalized |
| 344 |
| 345 def resolve_depends(depends, include_dirs): |
| 346 include_dirs = tuple(include_dirs) |
| 347 resolved = [] |
| 348 for depend in depends: |
| 349 path = resolve_depend(depend, include_dirs) |
| 350 if path is not None: |
| 351 resolved.append(path) |
| 352 return resolved |
| 353 |
| 354 @cached_function |
| 355 def resolve_depend(depend, include_dirs): |
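|     # Angle-bracket entries such as "<math.h>" follow the C convention for |
|     # system headers and are deliberately left unresolved. |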
| 356 if depend[0] == '<' and depend[-1] == '>': |
| 357 return None |
| 358 for dir in include_dirs: |
| 359 path = join_path(dir, depend) |
| 360 if path_exists(path): |
| 361 return os.path.normpath(path) |
| 362 return None |
| 363 |
| 364 @cached_function |
| 365 def package(filename): |
| 366 dir = os.path.dirname(os.path.abspath(str(filename))) |
| 367 if dir != filename and path_exists(join_path(dir, '__init__.py')): |
| 368 return package(dir) + (os.path.basename(dir),) |
| 369 else: |
| 370 return () |
| 371 |
| 372 @cached_function |
| 373 def fully_qualified_name(filename): |
| 374 module = os.path.splitext(os.path.basename(filename))[0] |
| 375 return '.'.join(package(filename) + (module,)) |
| 376 |
| 377 |
| 378 @cached_function |
| 379 def parse_dependencies(source_filename): |
| 380     # Actual parsing is way too slow, so we use regular expressions. |
| 381 # The only catch is that we must strip comments and string |
| 382 # literals ahead of time. |
| 383 fh = Utils.open_source_file(source_filename, "rU", error_handling='ignore') |
| 384 try: |
| 385 source = fh.read() |
| 386 finally: |
| 387 fh.close() |
| 388 distutils_info = DistutilsInfo(source) |
| 389 source, literals = strip_string_literals(source) |
| 390 source = source.replace('\\\n', ' ').replace('\t', ' ') |
| 391 |
| 392 # TODO: pure mode |
| 393 cimports = [] |
| 394 includes = [] |
| 395 externs = [] |
| 396     for m in dependency_regex.finditer(source): |
| 397 cimport_from, cimport, extern, include = m.groups() |
| 398 if cimport_from: |
| 399 cimports.append(cimport_from) |
| 400 elif cimport: |
| 401 cimports.append(cimport) |
| 402 elif extern: |
| 403 externs.append(literals[extern]) |
| 404 else: |
| 405 includes.append(literals[include]) |
| 406 return cimports, includes, externs, distutils_info |
| 407 |
| 408 |
| 409 class DependencyTree(object): |
| 410 |
| 411 def __init__(self, context, quiet=False): |
| 412 self.context = context |
| 413 self.quiet = quiet |
| 414 self._transitive_cache = {} |
| 415 |
| 416 def parse_dependencies(self, source_filename): |
| 417 return parse_dependencies(source_filename) |
| 418 |
| 419 @cached_method |
| 420 def included_files(self, filename): |
| 421 # This is messy because included files are textually included, resolving |
| 422 # cimports (but not includes) relative to the including file. |
| 423 all = set() |
| 424 for include in self.parse_dependencies(filename)[1]: |
| 425 include_path = join_path(os.path.dirname(filename), include) |
| 426 if not path_exists(include_path): |
| 427 include_path = self.context.find_include_file(include, None) |
| 428 if include_path: |
| 429 if '.' + os.path.sep in include_path: |
| 430 include_path = os.path.normpath(include_path) |
| 431 all.add(include_path) |
| 432 all.update(self.included_files(include_path)) |
| 433 elif not self.quiet: |
| 434                 print("Unable to locate '%s' referenced from '%s'" % (include, filename)) |
| 435 return all |
| 436 |
| 437 @cached_method |
| 438 def cimports_and_externs(self, filename): |
| 439 # This is really ugly. Nested cimports are resolved with respect to the |
| 440 # includer, but includes are resolved with respect to the includee. |
| 441 cimports, includes, externs = self.parse_dependencies(filename)[:3] |
| 442 cimports = set(cimports) |
| 443 externs = set(externs) |
| 444 for include in self.included_files(filename): |
| 445             included_cimports, included_externs = self.cimports_and_externs(include) |
| 446 cimports.update(included_cimports) |
| 447 externs.update(included_externs) |
| 448 return tuple(cimports), normalize_existing(filename, externs) |
| 449 |
| 450 def cimports(self, filename): |
| 451 return self.cimports_and_externs(filename)[0] |
| 452 |
| 453 def package(self, filename): |
| 454 return package(filename) |
| 455 |
| 456 def fully_qualified_name(self, filename): |
| 457 return fully_qualified_name(filename) |
| 458 |
| 459 @cached_method |
| 460 def find_pxd(self, module, filename=None): |
| 461 is_relative = module[0] == '.' |
| 462 if is_relative and not filename: |
| 463 raise NotImplementedError("New relative imports.") |
| 464 if filename is not None: |
| 465 module_path = module.split('.') |
| 466 if is_relative: |
| 467 module_path.pop(0) # just explicitly relative |
| 468 package_path = list(self.package(filename)) |
| 469 while module_path and not module_path[0]: |
| 470 try: |
| 471 package_path.pop() |
| 472 except IndexError: |
| 473 return None # FIXME: error? |
| 474 module_path.pop(0) |
| 475 relative = '.'.join(package_path + module_path) |
| 476 pxd = self.context.find_pxd_file(relative, None) |
| 477 if pxd: |
| 478 return pxd |
| 479 if is_relative: |
| 480 return None # FIXME: error? |
| 481 return self.context.find_pxd_file(module, None) |
| 482 |
| 483 @cached_method |
| 484 def cimported_files(self, filename): |
| 485 if filename[-4:] == '.pyx' and path_exists(filename[:-4] + '.pxd'): |
| 486 pxd_list = [filename[:-4] + '.pxd'] |
| 487 else: |
| 488 pxd_list = [] |
| 489 for module in self.cimports(filename): |
| 490 if module[:7] == 'cython.' or module == 'cython': |
| 491 continue |
| 492 pxd_file = self.find_pxd(module, filename) |
| 493 if pxd_file is not None: |
| 494 pxd_list.append(pxd_file) |
| 495 elif not self.quiet: |
| 496 print("missing cimport in module '%s': %s" % (module, filename)) |
| 497 return tuple(pxd_list) |
| 498 |
| 499 @cached_method |
| 500 def immediate_dependencies(self, filename): |
| 501 all = set([filename]) |
| 502 all.update(self.cimported_files(filename)) |
| 503 all.update(self.included_files(filename)) |
| 504 return all |
| 505 |
| 506 def all_dependencies(self, filename): |
| 507         return self.transitive_merge(filename, self.immediate_dependencies, set.union) |
| 508 |
| 509 @cached_method |
| 510 def timestamp(self, filename): |
| 511 return os.path.getmtime(filename) |
| 512 |
| 513 def extract_timestamp(self, filename): |
| 514 return self.timestamp(filename), filename |
| 515 |
| 516 def newest_dependency(self, filename): |
| 517         return max([self.extract_timestamp(f) for f in self.all_dependencies(filename)]) |
| 518 |
| 519 def transitive_fingerprint(self, filename, extra=None): |
| 520 try: |
| 521 m = hashlib.md5(__version__) |
| 522 m.update(file_hash(filename)) |
| 523 for x in sorted(self.all_dependencies(filename)): |
| 524 if os.path.splitext(x)[1] not in ('.c', '.cpp', '.h'): |
| 525 m.update(file_hash(x)) |
| 526 if extra is not None: |
| 527 m.update(str(extra)) |
| 528 return m.hexdigest() |
| 529 except IOError: |
| 530 return None |
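|  |
|     # The fingerprint changes whenever the Cython version, the source file, |
|     # any non-C/C++ dependency, or the 'extra' value (the target language) |
|     # changes, i.e. the inputs that determine the generated C file. |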
| 531 |
| 532 def distutils_info0(self, filename): |
| 533 info = self.parse_dependencies(filename)[3] |
| 534 externs = self.cimports_and_externs(filename)[1] |
| 535 if externs: |
| 536 if 'depends' in info.values: |
| 537                 info.values['depends'] = list(set(info.values['depends']).union(externs)) |
| 538 else: |
| 539 info.values['depends'] = list(externs) |
| 540 return info |
| 541 |
| 542 def distutils_info(self, filename, aliases=None, base=None): |
| 543         return (self.transitive_merge(filename, self.distutils_info0, DistutilsInfo.merge) |
| 544 .subs(aliases) |
| 545 .merge(base)) |
| 546 |
| 547 def transitive_merge(self, node, extract, merge): |
| 548 try: |
| 549 seen = self._transitive_cache[extract, merge] |
| 550 except KeyError: |
| 551 seen = self._transitive_cache[extract, merge] = {} |
| 552 return self.transitive_merge_helper( |
| 553 node, extract, merge, seen, {}, self.cimported_files)[0] |
| 554 |
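|     # A note on the traversal: transitive_merge_helper() below does a |
|     # depth-first walk, merging each node's extracted value with those of |
|     # everything reachable from it. 'stack' maps in-progress nodes to their |
|     # DFS depth, 'loop' records the shallowest in-progress node a cycle |
|     # reaches, and results are memoized in 'seen' only once a node is known |
|     # not to sit on an unfinished cycle. |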
| 555     def transitive_merge_helper(self, node, extract, merge, seen, stack, outgoing): |
| 556 if node in seen: |
| 557 return seen[node], None |
| 558 deps = extract(node) |
| 559 if node in stack: |
| 560 return deps, node |
| 561 try: |
| 562 stack[node] = len(stack) |
| 563 loop = None |
| 564 for next in outgoing(node): |
| 565                 sub_deps, sub_loop = self.transitive_merge_helper(next, extract, merge, seen, stack, outgoing) |
| 566 if sub_loop is not None: |
| 567 if loop is not None and stack[loop] < stack[sub_loop]: |
| 568 pass |
| 569 else: |
| 570 loop = sub_loop |
| 571 deps = merge(deps, sub_deps) |
| 572 if loop == node: |
| 573 loop = None |
| 574 if loop is None: |
| 575 seen[node] = deps |
| 576 return deps, loop |
| 577 finally: |
| 578 del stack[node] |
| 579 |
| 580 _dep_tree = None |
| 581 def create_dependency_tree(ctx=None, quiet=False): |
| 582 global _dep_tree |
| 583 if _dep_tree is None: |
| 584 if ctx is None: |
| 585 ctx = Context(["."], CompilationOptions(default_options)) |
| 586 _dep_tree = DependencyTree(ctx, quiet=quiet) |
| 587 return _dep_tree |
| 588 |
| 589 # This may be useful for advanced users? |
| 590 def create_extension_list(patterns, exclude=[], ctx=None, aliases=None, quiet=False, exclude_failures=False): |
| 591 if not isinstance(patterns, (list, tuple)): |
| 592 patterns = [patterns] |
| 593     explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)]) |
| 594 seen = set() |
| 595 deps = create_dependency_tree(ctx, quiet=quiet) |
| 596 to_exclude = set() |
| 597 if not isinstance(exclude, list): |
| 598 exclude = [exclude] |
| 599 for pattern in exclude: |
| 600 to_exclude.update(map(os.path.abspath, extended_iglob(pattern))) |
| 601 module_list = [] |
| 602 for pattern in patterns: |
| 603 if isinstance(pattern, str): |
| 604 filepattern = pattern |
| 605 template = None |
| 606 name = '*' |
| 607 base = None |
| 608 exn_type = Extension |
| 609 elif isinstance(pattern, Extension): |
| 610 filepattern = pattern.sources[0] |
| 611 if os.path.splitext(filepattern)[1] not in ('.py', '.pyx'): |
| 612 # ignore non-cython modules |
| 613 module_list.append(pattern) |
| 614 continue |
| 615 template = pattern |
| 616 name = template.name |
| 617 base = DistutilsInfo(exn=template) |
| 618 exn_type = template.__class__ |
| 619 else: |
| 620 raise TypeError(pattern) |
| 621 for file in extended_iglob(filepattern): |
| 622 if os.path.abspath(file) in to_exclude: |
| 623 continue |
| 624 pkg = deps.package(file) |
| 625 if '*' in name: |
| 626 module_name = deps.fully_qualified_name(file) |
| 627 if module_name in explicit_modules: |
| 628 continue |
| 629 else: |
| 630 module_name = name |
| 631 if module_name not in seen: |
| 632 try: |
| 633 kwds = deps.distutils_info(file, aliases, base).values |
| 634 except Exception: |
| 635 if exclude_failures: |
| 636 continue |
| 637 raise |
| 638 if base is not None: |
| 639 for key, value in base.values.items(): |
| 640 if key not in kwds: |
| 641 kwds[key] = value |
| 642 sources = [file] |
| 643 if template is not None: |
| 644 sources += template.sources[1:] |
| 645 if 'sources' in kwds: |
| 646 # allow users to add .c files etc. |
| 647 for source in kwds['sources']: |
| 648 source = encode_filename_in_py2(source) |
| 649 if source not in sources: |
| 650 sources.append(source) |
| 651 del kwds['sources'] |
| 652 if 'depends' in kwds: |
| 653                     depends = resolve_depends(kwds['depends'], (kwds.get('include_dirs') or []) + [find_root_package_dir(file)]) |
| 654 if template is not None: |
| 655 # Always include everything from the template. |
| 656                         depends = list(set(template.depends).union(set(depends))) |
| 657 kwds['depends'] = depends |
| 658 module_list.append(exn_type( |
| 659 name=module_name, |
| 660 sources=sources, |
| 661 **kwds)) |
| 662 m = module_list[-1] |
| 663                 seen.add(module_name) |
| 664 return module_list |
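|  |
| # An illustrative call (pattern and aliases invented), mirroring what |
| # cythonize() below does internally: |
| # |
| #     modules = create_extension_list("pkg/*.pyx", |
| #                                     exclude=["pkg/broken.pyx"], |
| #                                     aliases={'NUMPY': '/usr/include/numpy'}) |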
| 665 |
| 666 # This is the user-exposed entry point. |
| 667 def cythonize(module_list, exclude=[], nthreads=0, aliases=None, quiet=False, force=False, |
| 668 exclude_failures=False, **options): |
| 669 """ |
| 670     Compile a set of source modules into C/C++ files and return a list of distutils |
| 671 Extension objects for them. |
| 672 |
| 673     As module list, pass either a glob pattern, a list of glob patterns or a list of |
| 674     Extension objects.  The latter allows you to configure the extensions separately |
| 675 through the normal distutils options. |
| 676 |
| 677 When using glob patterns, you can exclude certain module names explicitly |
| 678 by passing them into the 'exclude' option. |
| 679 |
| 680 For parallel compilation, set the 'nthreads' option to the number of |
| 681 concurrent builds. |
| 682 |
| 683 For a broad 'try to compile' mode that ignores compilation failures and |
| 684 simply excludes the failed extensions, pass 'exclude_failures=True'. Note |
| 685 that this only really makes sense for compiling .py files which can also |
| 686 be used without compilation. |
| 687 |
| 688 Additional compilation options can be passed as keyword arguments. |
| 689 """ |
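|     # A minimal usage sketch (file pattern invented): |
|     # |
|     #     from distutils.core import setup |
|     #     from Cython.Build import cythonize |
|     # |
|     #     setup(ext_modules=cythonize("src/*.pyx", nthreads=4)) |
|  |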
| 690 if 'include_path' not in options: |
| 691 options['include_path'] = ['.'] |
| 692 if 'common_utility_include_dir' in options: |
| 693 if options.get('cache'): |
| 694             raise NotImplementedError("common_utility_include_dir does not yet work with caching") |
| 695 if not os.path.exists(options['common_utility_include_dir']): |
| 696 os.makedirs(options['common_utility_include_dir']) |
| 697 c_options = CompilationOptions(**options) |
| 698 cpp_options = CompilationOptions(**options); cpp_options.cplus = True |
| 699 ctx = c_options.create_context() |
| 700 options = c_options |
| 701 module_list = create_extension_list( |
| 702 module_list, |
| 703 exclude=exclude, |
| 704 ctx=ctx, |
| 705 quiet=quiet, |
| 706 exclude_failures=exclude_failures, |
| 707 aliases=aliases) |
| 708 deps = create_dependency_tree(ctx, quiet=quiet) |
| 709 build_dir = getattr(options, 'build_dir', None) |
| 710 modules_by_cfile = {} |
| 711 to_compile = [] |
| 712 for m in module_list: |
| 713 if build_dir: |
| 714             root = os.path.realpath(os.path.abspath(find_root_package_dir(m.sources[0]))) |
| 715 def copy_to_build_dir(filepath, root=root): |
| 716 filepath_abs = os.path.realpath(os.path.abspath(filepath)) |
| 717 if os.path.isabs(filepath): |
| 718 filepath = filepath_abs |
| 719 if filepath_abs.startswith(root): |
| 720 mod_dir = os.path.join(build_dir, |
| 721 os.path.dirname(_relpath(filepath, root))) |
| 722 if not os.path.isdir(mod_dir): |
| 723 os.makedirs(mod_dir) |
| 724 shutil.copy(filepath, mod_dir) |
| 725 for dep in m.depends: |
| 726 copy_to_build_dir(dep) |
| 727 |
| 728 new_sources = [] |
| 729 for source in m.sources: |
| 730 base, ext = os.path.splitext(source) |
| 731 if ext in ('.pyx', '.py'): |
| 732 if m.language == 'c++': |
| 733 c_file = base + '.cpp' |
| 734 options = cpp_options |
| 735 else: |
| 736 c_file = base + '.c' |
| 737 options = c_options |
| 738 |
| 739 # setup for out of place build directory if enabled |
| 740 if build_dir: |
| 741 c_file = os.path.join(build_dir, c_file) |
| 742 dir = os.path.dirname(c_file) |
| 743 if not os.path.isdir(dir): |
| 744 os.makedirs(dir) |
| 745 |
| 746 if os.path.exists(c_file): |
| 747 c_timestamp = os.path.getmtime(c_file) |
| 748 else: |
| 749 c_timestamp = -1 |
| 750 |
| 751 # Priority goes first to modified files, second to direct |
| 752 # dependents, and finally to indirect dependents. |
| 753 if c_timestamp < deps.timestamp(source): |
| 754 dep_timestamp, dep = deps.timestamp(source), source |
| 755 priority = 0 |
| 756 else: |
| 757 dep_timestamp, dep = deps.newest_dependency(source) |
| 758 priority = 2 - (dep in deps.immediate_dependencies(source)) |
| 759 if force or c_timestamp < dep_timestamp: |
| 760 if not quiet: |
| 761 if source == dep: |
| 762 print("Compiling %s because it changed." % source) |
| 763 else: |
| 764                             print("Compiling %s because it depends on %s." % (source, dep)) |
| 765 if not force and hasattr(options, 'cache'): |
| 766 extra = m.language |
| 767 fingerprint = deps.transitive_fingerprint(source, extra) |
| 768 else: |
| 769 fingerprint = None |
| 770                     to_compile.append((priority, source, c_file, fingerprint, quiet, |
| 771 options, not exclude_failures)) |
| 772 new_sources.append(c_file) |
| 773 if c_file not in modules_by_cfile: |
| 774 modules_by_cfile[c_file] = [m] |
| 775 else: |
| 776 modules_by_cfile[c_file].append(m) |
| 777 else: |
| 778 new_sources.append(source) |
| 779 if build_dir: |
| 780 copy_to_build_dir(source) |
| 781 m.sources = new_sources |
| 782 if hasattr(options, 'cache'): |
| 783 if not os.path.exists(options.cache): |
| 784 os.makedirs(options.cache) |
| 785 to_compile.sort() |
| 786 if nthreads: |
| 787 # Requires multiprocessing (or Python >= 2.6) |
| 788 try: |
| 789 import multiprocessing |
| 790 pool = multiprocessing.Pool(nthreads) |
| 791 except (ImportError, OSError): |
| 792 print("multiprocessing required for parallel cythonization") |
| 793 nthreads = 0 |
| 794 else: |
| 795 pool.map(cythonize_one_helper, to_compile) |
| 796 if not nthreads: |
| 797 for args in to_compile: |
| 798 cythonize_one(*args[1:]) |
| 799 if exclude_failures: |
| 800 failed_modules = set() |
| 801         for c_file, modules in modules_by_cfile.items(): |
| 802 if not os.path.exists(c_file): |
| 803 failed_modules.update(modules) |
| 804 elif os.path.getsize(c_file) < 200: |
| 805 f = io_open(c_file, 'r', encoding='iso8859-1') |
| 806 try: |
| 807 if f.read(len('#error ')) == '#error ': |
| 808 # dead compilation result |
| 809 failed_modules.update(modules) |
| 810 finally: |
| 811 f.close() |
| 812 if failed_modules: |
| 813 for module in failed_modules: |
| 814 module_list.remove(module) |
| 815 print("Failed compilations: %s" % ', '.join(sorted([ |
| 816 module.name for module in failed_modules]))) |
| 817 if hasattr(options, 'cache'): |
| 818         cleanup_cache(options.cache, getattr(options, 'cache_size', 1024 * 1024 * 100)) |
| 819 # cythonize() is often followed by the (non-Python-buffered) |
| 820 # compiler output, flush now to avoid interleaving output. |
| 821 sys.stdout.flush() |
| 822 return module_list |
| 823 |
| 824 |
| 825 if os.environ.get('XML_RESULTS'): |
| 826 compile_result_dir = os.environ['XML_RESULTS'] |
| 827 def record_results(func): |
| 828 def with_record(*args): |
| 829 t = time.time() |
| 830 success = True |
| 831 try: |
| 832 try: |
| 833 func(*args) |
| 834 except: |
| 835 success = False |
| 836 finally: |
| 837 t = time.time() - t |
| 838 module = fully_qualified_name(args[0]) |
| 839 name = "cythonize." + module |
| 840 failures = 1 - success |
| 841 if success: |
| 842 failure_item = "" |
| 843 else: |
| 844 failure_item = "failure" |
| 845                 output = open(os.path.join(compile_result_dir, name + ".xml"), "w") |
| 846 output.write(""" |
| 847 <?xml version="1.0" ?> |
| 848                     <testsuite name="%(name)s" errors="0" failures="%(failures)s" tests="1" time="%(t)s"> |
| 849 <testcase classname="%(name)s" name="cythonize"> |
| 850 %(failure_item)s |
| 851 </testcase> |
| 852 </testsuite> |
| 853 """.strip() % locals()) |
| 854 output.close() |
| 855 return with_record |
| 856 else: |
| 857 record_results = lambda x: x |
| 858 |
| 859 # TODO: Share context? Issue: pyx processing leaks into pxd module |
| 860 @record_results |
| 861 def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None, raise_on_failure=True): |
| 862 from Cython.Compiler.Main import compile, default_options |
| 863 from Cython.Compiler.Errors import CompileError, PyrexError |
| 864 |
| 865 if fingerprint: |
| 866 if not os.path.exists(options.cache): |
| 867 try: |
| 868 os.mkdir(options.cache) |
| 869 except: |
| 870 if not os.path.exists(options.cache): |
| 871 raise |
| 872 # Cython-generated c files are highly compressible. |
| 873 # (E.g. a compression ratio of about 10 for Sage). |
| 874 fingerprint_file = join_path( |
| 875             options.cache, "%s-%s%s" % (os.path.basename(c_file), fingerprint, gzip_ext)) |
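|         # e.g. "<cache dir>/mymodule.c-<32-char md5 hexdigest>.gz" |
|         # (file name invented for illustration) |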
| 876 if os.path.exists(fingerprint_file): |
| 877 if not quiet: |
| 878 print("Found compiled %s in cache" % pyx_file) |
| 879 os.utime(fingerprint_file, None) |
| 880 g = gzip_open(fingerprint_file, 'rb') |
| 881 try: |
| 882 f = open(c_file, 'wb') |
| 883 try: |
| 884 shutil.copyfileobj(g, f) |
| 885 finally: |
| 886 f.close() |
| 887 finally: |
| 888 g.close() |
| 889 return |
| 890 if not quiet: |
| 891 print("Cythonizing %s" % pyx_file) |
| 892 if options is None: |
| 893 options = CompilationOptions(default_options) |
| 894 options.output_file = c_file |
| 895 |
| 896 any_failures = 0 |
| 897 try: |
| 898 result = compile([pyx_file], options) |
| 899 if result.num_errors > 0: |
| 900 any_failures = 1 |
| 901 except (EnvironmentError, PyrexError), e: |
| 902 sys.stderr.write('%s\n' % e) |
| 903 any_failures = 1 |
| 904 # XXX |
| 905 import traceback |
| 906 traceback.print_exc() |
| 907 except Exception: |
| 908 if raise_on_failure: |
| 909 raise |
| 910 import traceback |
| 911 traceback.print_exc() |
| 912 any_failures = 1 |
| 913 if any_failures: |
| 914 if raise_on_failure: |
| 915 raise CompileError(None, pyx_file) |
| 916 elif os.path.exists(c_file): |
| 917 os.remove(c_file) |
| 918 elif fingerprint: |
| 919 f = open(c_file, 'rb') |
| 920 try: |
| 921 g = gzip_open(fingerprint_file, 'wb') |
| 922 try: |
| 923 shutil.copyfileobj(f, g) |
| 924 finally: |
| 925 g.close() |
| 926 finally: |
| 927 f.close() |
| 928 |
| 929 def cythonize_one_helper(m): |
| 930 import traceback |
| 931 try: |
| 932 return cythonize_one(*m[1:]) |
| 933 except Exception: |
| 934 traceback.print_exc() |
| 935 raise |
| 936 |
| 937 def cleanup_cache(cache, target_size, ratio=.85): |
| 938 try: |
| 939         p = subprocess.Popen(['du', '-s', '-k', os.path.abspath(cache)], stdout=subprocess.PIPE) |
| 940 res = p.wait() |
| 941 if res == 0: |
| 942 total_size = 1024 * int(p.stdout.read().strip().split()[0]) |
| 943 if total_size < target_size: |
| 944 return |
| 945 except (OSError, ValueError): |
| 946 pass |
| 947 total_size = 0 |
| 948 all = [] |
| 949 for file in os.listdir(cache): |
| 950 path = join_path(cache, file) |
| 951 s = os.stat(path) |
| 952 total_size += s.st_size |
| 953 all.append((s.st_atime, s.st_size, path)) |
| 954 if total_size > target_size: |
| 955         for atime, size, path in sorted(all):  # least recently used first |
| 956             os.unlink(path) |
| 957 total_size -= size |
| 958 if total_size < target_size * ratio: |
| 959 break |