OLD | NEW |
(Empty) | |
| 1 # This program is free software; you can redistribute it and/or modify it under |
| 2 # the terms of the GNU General Public License as published by the Free Software |
| 3 # Foundation; either version 2 of the License, or (at your option) any later |
| 4 # version. |
| 5 # |
| 6 # This program is distributed in the hope that it will be useful, but WITHOUT |
| 7 # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS |
| 8 # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. |
| 9 # |
| 10 # You should have received a copy of the GNU General Public License along with |
| 11 # this program; if not, write to the Free Software Foundation, Inc., |
| 12 # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. |
| 13 """ Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). |
| 14 http://www.logilab.fr/ -- mailto:contact@logilab.fr |
| 15 |
| 16 Raw metrics checker |
| 17 """ |
| 18 |
| 19 import tokenize |
| 20 |
# tokenize.NL exists on every Python version pylint supports (>= 2.2),
# so this guard is no longer necessary
| 22 #if not hasattr(tokenize, 'NL'): |
| 23 # raise ValueError("tokenize.NL doesn't exist -- tokenize module too old") |
| 24 |
| 25 from logilab.common.ureports import Table |
| 26 |
| 27 from pylint.interfaces import IRawChecker |
| 28 from pylint.checkers import BaseRawChecker, EmptyReport |
| 29 from pylint.reporters import diff_string |
| 30 |
def report_raw_stats(sect, stats, old_stats):
    """Fill *sect* with a table of code/docstring/comment/empty line counts.

    Each row shows the count, its percentage of all analyzed lines, the
    previous run's count and the difference. Raises EmptyReport when no
    line has been analyzed at all.
    """
    nb_lines = stats['total_lines']
    if not nb_lines:
        # nothing was analyzed: tell the reporter to skip this section
        raise EmptyReport()
    sect.description = '%s lines have been analyzed' % nb_lines
    cells = ['type', 'number', '%', 'previous', 'difference']
    for category in ('code', 'docstring', 'comment', 'empty'):
        stat_key = category + '_lines'
        count = stats[stat_key]
        ratio = float(count * 100) / nb_lines
        previous = old_stats.get(stat_key)
        if previous is None:
            # no earlier run to compare against
            previous, delta = 'NC', 'NC'
        else:
            delta = diff_string(previous, count)
        cells.extend((category, str(count), '%.2f' % ratio,
                      str(previous), delta))
    sect.append(Table(children=tuple(cells), cols=5, rheaders=1))
| 51 |
| 52 |
class RawMetricsChecker(BaseRawChecker):
    """Checker that reports no problem but accumulates raw line metrics:

    * total number of lines
    * total number of code lines
    * total number of docstring lines
    * total number of comments lines
    * total number of empty lines
    """

    __implements__ = (IRawChecker,)

    # configuration section name
    name = 'metrics'
    # configuration options
    options = ()
    # messages
    msgs = {}
    # reports
    reports = (('RP0701', 'Raw metrics', report_raw_stats),)

    def __init__(self, linter):
        BaseRawChecker.__init__(self, linter)
        # counters dict, created lazily in open()
        self.stats = None

    def open(self):
        """init statistics"""
        self.stats = self.linter.add_stats(
            total_lines=0, code_lines=0, empty_lines=0,
            docstring_lines=0, comment_lines=0)

    def process_tokens(self, tokens):
        """update stats"""
        token_list = list(tokens)
        index = 0
        # walk the token stream one physical-line group at a time
        while index < len(token_list):
            index, line_count, line_kind = get_type(token_list, index)
            self.stats['total_lines'] += line_count
            self.stats[line_kind] += line_count
| 91 |
| 92 |
# token types that never decide a line's classification on their own
JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)

def get_type(tokens, start_index):
    """Classify the physical line(s) starting at tokens[start_index].

    Returns a (next_index, line_count, line_type) triple where line_type
    is one of 'docstring_lines', 'comment_lines', 'code_lines' or
    'empty_lines' and line_count is the number of physical lines spanned.
    """
    index = start_index
    first_row = tokens[index][2][0]
    end_pos = tokens[index][2]
    kind = tokens[index][0]
    line_type = None
    # consume every token that starts on the same row as the first one;
    # the first non-junk token seen decides the line's type
    while index < len(tokens) and tokens[index][2][0] == first_row:
        kind = tokens[index][0]
        end_pos = tokens[index][3]
        if line_type is None:
            if kind == tokenize.STRING:
                line_type = 'docstring_lines'
            elif kind == tokenize.COMMENT:
                line_type = 'comment_lines'
            elif kind not in JUNK:
                line_type = 'code_lines'
        index += 1
    if line_type is None:
        # only junk tokens were seen: a blank line
        line_type = 'empty_lines'
    elif index < len(tokens) and kind == tokenize.NEWLINE:
        # swallow the trailing NEWLINE so it isn't counted as another line
        index += 1
    return index, end_pos[0] - first_row + 1, line_type
| 120 |
| 121 |
def register(linter):
    """Auto-registration entry point called by pylint."""
    checker = RawMetricsChecker(linter)
    linter.register_checker(checker)
| 125 |
OLD | NEW |