# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Runs delta test on 2 findit versions."""

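# Example invocation (a sketch; the script filename is hypothetical and the
# dates are only illustrative; every flag used here is defined in
# RunDeltaTest below):
#   python run_delta_test.py -r rev1 rev2 -c fracas -s 2016-01-01 -u 2016-06-30
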
import argparse
from datetime import date
from datetime import timedelta
import logging
import os
import pickle
import sys

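# The two os.path.pardir hops below make the directory two levels above this
# file importable, so that script_util can be found; SetUpSystemPaths() then
# configures the remaining import paths.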
_SCRIPT_DIR = os.path.join(os.path.dirname(__file__), os.path.pardir,
                           os.path.pardir)
sys.path.insert(1, _SCRIPT_DIR)

import script_util
script_util.SetUpSystemPaths()

from crash.type_enums import CrashClient
from crash_queries.delta_test import delta_test
from crash_queries.delta_test import delta_util

_TODAY = date.today().strftime('%Y-%m-%d')
_A_YEAR_AGO = (date.today() - timedelta(days=365)).strftime('%Y-%m-%d')

# App Engine APIs will fail if batch size is more than 1000.
_MAX_BATCH_SIZE = 1000
_DEFAULT_BATCH_SIZE = _MAX_BATCH_SIZE

DELTA_RESULTS_DIRECTORY = os.path.join(os.path.dirname(__file__),
                                       'delta_results')
CHROMIUM_REPO = 'https://chromium.googlesource.com/chromium/src'


def RunDeltaTest():
  """Runs delta testing between 2 different Findit versions."""
  argparser = argparse.ArgumentParser(
      description='Run delta test between 2 findit versions.')

  argparser.add_argument(
      '--revisions',
      '-r',
      nargs='+',
      default=['HEAD^', 'HEAD'],
      help=('2 findit revisions to be compared. It can take '
            '1 or 2 revisions.\n'
            '(1) -r rev1 rev2: compare rev1 and rev2\n'
            '(2) -r rev: compare rev and the current HEAD\n'
            '(3) no revisions provided: defaults to comparing '
            'HEAD^ and HEAD'))

  argparser.add_argument(
      '--client',
      '-c',
      default='fracas',
      help=('Possible values are: fracas, cracas, clusterfuzz. Right now, '
            'only fracas is supported.'))

  argparser.add_argument(
      '--app',
      '-a',
      default=os.getenv('APP_ID', 'findit-for-me-dev'),
      help=('App id of the App Engine app that the query needs to access. '
            'Defaults to findit-for-me-dev. You can set the environment '
            'variable by \'export APP_ID=your-app-id\' to replace the '
            'default value.'))

  argparser.add_argument(
      '--since',
      '-s',
      default=_A_YEAR_AGO,
      help=('Query data since this date (including this date). '
            'Should be in YYYY-MM-DD format, e.g. 2015-09-30. '
            'Defaults to a year ago.'))

  argparser.add_argument(
      '--until',
      '-u',
      default=_TODAY,
      help=('Query data until this date (not including this date). '
            'Should be in YYYY-MM-DD format, e.g. 2015-09-30. '
            'Defaults to today.'))

  argparser.add_argument(
      '--batch',
      '-b',
      type=int,
      default=_DEFAULT_BATCH_SIZE,
      help=('The size of the batch that can be processed at one time.\n'
            'Note, the batch size cannot be greater than 1000, or App Engine '
            'APIs would fail.\nDefaults to the maximum, 1000.'))

  argparser.add_argument(
      '--verbose',
      '-v',
      action='store_true',
      default=False,
      help='Print findit results. Defaults to False.')

  args = argparser.parse_args()

  # In verbose mode, print debug information.
  if args.verbose:
    logging.basicConfig(level=logging.DEBUG)
  else:
    logging.basicConfig(level=logging.INFO)

  if len(args.revisions) > 2:
    logging.error('Delta test only supports comparing 2 versions.')
    sys.exit(1)

  if args.batch > _MAX_BATCH_SIZE:
    logging.error('Batch size cannot be greater than %s, or App Engine APIs '
                  'would fail.', _MAX_BATCH_SIZE)
    sys.exit(1)

  # If only one revision is provided, default the second revision to HEAD.
  if len(args.revisions) == 1:
    args.revisions.append('HEAD')

  git_hash1 = delta_util.ParseGitHash(args.revisions[0])
  git_hash2 = delta_util.ParseGitHash(args.revisions[1])

  delta_result_prefix = '%s_%s_%s..%s.delta' % (git_hash1[:7], git_hash2[:7],
                                                args.since, args.until)
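  # The .csv file below is the human-readable report; the hidden pickle file
  # caches the raw (deltas, crash_num) pair so existing results can be
  # re-printed without re-running the evaluation.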
  delta_csv_path = os.path.join(
      DELTA_RESULTS_DIRECTORY, '%s.csv' % delta_result_prefix)
  delta_path = os.path.join(
      DELTA_RESULTS_DIRECTORY, '.%s' % delta_result_prefix)

  # Check if delta results already exist.
  if os.path.exists(delta_csv_path):
    logging.info('Delta results already exist in\n%s', delta_csv_path)
    if not os.path.exists(delta_path):
      logging.info('Cannot print out delta results, '
                   'please open %s to see the results.', delta_csv_path)
      return

    with open(delta_path) as f:
      deltas, crash_num = pickle.load(f)
  else:
    logging.info('Running delta test...')
    # Get delta of results between git_hash1 and git_hash2.
    deltas, crash_num = delta_test.DeltaEvaluator(
        git_hash1, git_hash2, args.client, args.app,
        start_date=args.since, end_date=args.until,
        batch_size=args.batch, verbose=args.verbose)
    delta_util.FlushResult([deltas, crash_num], delta_path)
    delta_util.WriteDeltaToCSV(deltas, crash_num,
                               git_hash1, git_hash2, delta_csv_path)

  # Print delta results to users.
  logging.info('\n========================= Summary =========================')
  delta_util.PrintDelta(deltas, crash_num)


if __name__ == '__main__':
  RunDeltaTest()