Chromium Code Reviews| Index: scripts/slave/runtest.py |
| diff --git a/scripts/slave/runtest.py b/scripts/slave/runtest.py |
| index d169d54102b728f4f53cb825d8ff8822875754b1..16a42ee791fb8512188ad4759a191eaac4d86abc 100755 |
| --- a/scripts/slave/runtest.py |
| +++ b/scripts/slave/runtest.py |
| @@ -54,6 +54,7 @@ from slave import gtest_slave_utils |
| from slave import process_log_utils |
| from slave import results_dashboard |
| from slave import slave_utils |
| +from slave import telemetry_utils |
| from slave import xvfb |
| USAGE = '%s [options] test.exe [test args]' % os.path.basename(sys.argv[0]) |
| @@ -553,11 +554,12 @@ def _ListParsers(selection): |
| return shouldlist |
| -def _SelectResultsTracker(options): |
| +def _SelectResultsTracker(options, test_exe): |
| """Returns a log parser class (aka results tracker class). |
| Args: |
| options: Command-line options (from OptionParser). |
| + test_exe: Name of the test to execute. |
| Returns: |
| A log parser class (aka results tracker class), or None. |
| @@ -565,6 +567,9 @@ def _SelectResultsTracker(options): |
| if _UsingGtestJson(options): |
| return gtest_utils.GTestJSONParser |
| + if test_exe and test_exe.endswith('telemetry.py'): |
| + return telemetry_utils.TelemetryResultsTracker |
| + |
| parsers = _GetParsers() |
| if options.annotate: |
| if options.annotate in parsers: |
| @@ -589,6 +594,46 @@ def _GetCommitPos(build_properties): |
| return int(re.search(r'{#(\d+)}', commit_pos).group(1)) |
| +def _GetMainRevision(options): |
| + build_dir = os.path.abspath(options.build_dir) |
| + commit_pos_num = _GetCommitPos(options.build_properties) |
| + if commit_pos_num is not None: |
| + revision = commit_pos_num |
| + elif options.revision: |
| + revision = options.revision |
| + else: |
| + revision = _GetRevision(os.path.dirname(build_dir)) |
| + return revision |
| + |
| + |
| +def _GetBlinkRevision(options): |
| + build_dir = os.path.abspath(options.build_dir) |
| + |
| + if options.webkit_revision: |
| + webkit_revision = options.webkit_revision |
| + else: |
| + try: |
| + webkit_dir = chromium_utils.FindUpward( |
| + build_dir, 'third_party', 'WebKit', 'Source') |
| + webkit_revision = _GetRevision(webkit_dir) |
| + except Exception: |
| + webkit_revision = None |
| + return webkit_revision |
| + |
| + |
| +def _GetTelemetryRevisions(options): |
| + """Fills in the same revision fields that process_log_utils does.""" |
| + |
| + versions = {} |
| + versions['rev'] = _GetMainRevision(options) |
| + versions['webkit_rev'] = _GetBlinkRevision(options) |
| + versions['webrtc_rev'] = options.build_properties.get('got_webrtc_revision') |
| + versions['v8_rev'] = options.build_properties.get('got_v8_revision') |
| + versions['ver'] = options.build_properties.get('version') |
| + versions['git_revision'] = options.build_properties.get('git_revision') |
| + return versions |
| + |
| + |
| def _CreateResultsTracker(tracker_class, options): |
| """Instantiate a log parser (aka results tracker). |
| @@ -602,30 +647,13 @@ def _CreateResultsTracker(tracker_class, options): |
| if not tracker_class: |
| return None |
| - if tracker_class.__name__ in ('GTestLogParser',): |
| + if tracker_class.__name__ in ('GTestLogParser', 'TelemetryResultsTracker'): |
| tracker_obj = tracker_class() |
| elif tracker_class.__name__ in ('GTestJSONParser',): |
| tracker_obj = tracker_class(options.build_properties.get('mastername')) |
| else: |
| - build_dir = os.path.abspath(options.build_dir) |
| - |
| - if options.webkit_revision: |
| - webkit_revision = options.webkit_revision |
| - else: |
| - try: |
| - webkit_dir = chromium_utils.FindUpward( |
| - build_dir, 'third_party', 'WebKit', 'Source') |
| - webkit_revision = _GetRevision(webkit_dir) |
| - except Exception: |
| - webkit_revision = 'undefined' |
| - |
| - commit_pos_num = _GetCommitPos(options.build_properties) |
| - if commit_pos_num is not None: |
| - revision = commit_pos_num |
| - elif options.revision: |
| - revision = options.revision |
| - else: |
| - revision = _GetRevision(os.path.dirname(build_dir)) |
| + webkit_revision = _GetBlinkRevision(options) or 'undefined' |
| + revision = _GetMainRevision(options) or 'undefined' |
| tracker_obj = tracker_class( |
| revision=revision, |
| @@ -660,41 +688,45 @@ def _GetSupplementalColumns(build_dir, supplemental_colummns_file_name): |
| return supplemental_columns |
| -def _SendResultsToDashboard(results_tracker, system, test, url, build_dir, |
| - mastername, buildername, buildnumber, |
| - supplemental_columns_file, extra_columns=None): |
| +def _SendResultsToDashboard(results_tracker, options): |
|
ghost stip (do not use)
2014/09/09 02:20:06
iannucci@ strongly dislikes slinging the options object around [comment truncated in page capture]
sullivan
2014/09/11 00:25:56
Done.
|
| """Sends results from a results tracker (aka log parser) to the dashboard. |
| Args: |
| results_tracker: An instance of a log parser class, which has been used to |
| process the test output, so it contains the test results. |
| - system: A string such as 'linux-release', which comes from perf_id. |
| - test: Test "suite" name string. |
| - url: Dashboard URL. |
| - build_dir: Build dir name (used for cache file by results_dashboard). |
| - mastername: Buildbot master name, e.g. 'chromium.perf'. |
| - WARNING! This is incorrectly called "masterid" in some parts of the |
| - dashboard code. |
| - buildername: Builder name, e.g. 'Linux QA Perf (1)' |
| - buildnumber: Build number (as a string). |
| - supplemental_columns_file: Filename for JSON supplemental columns file. |
| - extra_columns: A dict of extra values to add to the supplemental columns |
| - dict. |
| + options: Program arguments. |
| """ |
| + system = _GetPerfID(options) |
| + test = options.test_type |
| + url = options.results_url |
| + mastername = options.build_properties.get('mastername') |
| + buildername = options.build_properties.get('buildername') |
| + buildnumber = options.build_properties.get('buildnumber') |
| + supplemental_columns_file = options.supplemental_columns_file |
| + extra_columns = options.perf_config |
| + |
| if system is None: |
| # perf_id not specified in factory properties. |
| print 'Error: No system name (perf_id) specified when sending to dashboard.' |
| return |
| + build_dir = os.path.abspath(options.build_dir) |
| supplemental_columns = _GetSupplementalColumns( |
| build_dir, supplemental_columns_file) |
| if extra_columns: |
| supplemental_columns.update(extra_columns) |
| - charts = _GetDataFromLogProcessor(results_tracker) |
| - points = results_dashboard.MakeListOfPoints( |
| - charts, system, test, mastername, buildername, buildnumber, |
| - supplemental_columns) |
| - results_dashboard.SendResults(points, url, build_dir) |
| + if results_tracker.IsChartJson(): |
| + results_dashboard.SendChartJsonResults( |
| + results_tracker.ChartJson(), results_tracker.RefJson(), |
| + _GetTelemetryRevisions(options), |
| + system, mastername, buildername, buildnumber, |
| + supplemental_columns, url, build_dir) |
| + else: |
| + charts = _GetDataFromLogProcessor(results_tracker) |
| + points = results_dashboard.MakeListOfPoints( |
| + charts, system, test, mastername, buildername, buildnumber, |
| + supplemental_columns) |
| + results_dashboard.SendResults(points, url, build_dir) |
| def _GetDataFromLogProcessor(log_processor): |
| @@ -962,7 +994,7 @@ def _MainParse(options, _args): |
| if _ListParsers(options.annotate): |
| return 0 |
| - tracker_class = _SelectResultsTracker(options) |
| + tracker_class = _SelectResultsTracker(options, None) |
| results_tracker = _CreateResultsTracker(tracker_class, options) |
| if options.generate_json_file: |
| @@ -1029,8 +1061,10 @@ def _MainMac(options, args, extra_env): |
| # If --annotate=list was passed, list the log parser classes and exit. |
| if _ListParsers(options.annotate): |
| return 0 |
| - tracker_class = _SelectResultsTracker(options) |
| + tracker_class = _SelectResultsTracker(options, test_exe) |
| results_tracker = _CreateResultsTracker(tracker_class, options) |
| + if hasattr(results_tracker, 'IsChartJson') and results_tracker.IsChartJson(): |
| + command.extend(results_tracker.GetArguments()) |
| if options.generate_json_file: |
| if os.path.exists(options.test_output_xml): |
| @@ -1079,14 +1113,7 @@ def _MainMac(options, args, extra_env): |
| perf_dashboard_id=options.perf_dashboard_id) |
| if options.results_url: |
| - _SendResultsToDashboard( |
| - results_tracker, _GetPerfID(options), |
| - options.test_type, options.results_url, options.build_dir, |
| - options.build_properties.get('mastername'), |
| - options.build_properties.get('buildername'), |
| - options.build_properties.get('buildnumber'), |
| - options.supplemental_columns_file, |
| - options.perf_config) |
| + _SendResultsToDashboard(results_tracker, options) |
| return result |
| @@ -1282,8 +1309,10 @@ def _MainLinux(options, args, extra_env): |
| # If --annotate=list was passed, list the log parser classes and exit. |
| if _ListParsers(options.annotate): |
| return 0 |
| - tracker_class = _SelectResultsTracker(options) |
| + tracker_class = _SelectResultsTracker(options, test_exe) |
| results_tracker = _CreateResultsTracker(tracker_class, options) |
| + if hasattr(results_tracker, 'IsChartJson') and results_tracker.IsChartJson(): |
| + command.extend(results_tracker.GetArguments()) |
| if options.generate_json_file: |
| if os.path.exists(options.test_output_xml): |
| @@ -1355,14 +1384,7 @@ def _MainLinux(options, args, extra_env): |
| perf_dashboard_id=options.perf_dashboard_id) |
| if options.results_url: |
| - _SendResultsToDashboard( |
| - results_tracker, _GetPerfID(options), |
| - options.test_type, options.results_url, options.build_dir, |
| - options.build_properties.get('mastername'), |
| - options.build_properties.get('buildername'), |
| - options.build_properties.get('buildnumber'), |
| - options.supplemental_columns_file, |
| - options.perf_config) |
| + _SendResultsToDashboard(results_tracker, options) |
| return result |
| @@ -1431,8 +1453,10 @@ def _MainWin(options, args, extra_env): |
| # If --annotate=list was passed, list the log parser classes and exit. |
| if _ListParsers(options.annotate): |
| return 0 |
| - tracker_class = _SelectResultsTracker(options) |
| + tracker_class = _SelectResultsTracker(options, test_exe) |
| results_tracker = _CreateResultsTracker(tracker_class, options) |
| + if hasattr(results_tracker, 'IsChartJson') and results_tracker.IsChartJson(): |
| + command.extend(results_tracker.GetArguments()) |
| if options.generate_json_file: |
| if os.path.exists(options.test_output_xml): |
| @@ -1477,14 +1501,7 @@ def _MainWin(options, args, extra_env): |
| perf_dashboard_id=options.perf_dashboard_id) |
| if options.results_url: |
| - _SendResultsToDashboard( |
| - results_tracker, _GetPerfID(options), |
| - options.test_type, options.results_url, options.build_dir, |
| - options.build_properties.get('mastername'), |
| - options.build_properties.get('buildername'), |
| - options.build_properties.get('buildnumber'), |
| - options.supplemental_columns_file, |
| - options.perf_config) |
| + _SendResultsToDashboard(results_tracker, options) |
| return result |
| @@ -1512,7 +1529,7 @@ def _MainAndroid(options, args, extra_env): |
| if _ListParsers(options.annotate): |
| return 0 |
| - tracker_class = _SelectResultsTracker(options) |
| + tracker_class = _SelectResultsTracker(options, None) |
| results_tracker = _CreateResultsTracker(tracker_class, options) |
| if options.generate_json_file: |
| @@ -1540,14 +1557,7 @@ def _MainAndroid(options, args, extra_env): |
| perf_dashboard_id=options.perf_dashboard_id) |
| if options.results_url: |
| - _SendResultsToDashboard( |
| - results_tracker, _GetPerfID(options), |
| - options.test_type, options.results_url, options.build_dir, |
| - options.build_properties.get('mastername'), |
| - options.build_properties.get('buildername'), |
| - options.build_properties.get('buildnumber'), |
| - options.supplemental_columns_file, |
| - options.perf_config) |
| + _SendResultsToDashboard(results_tracker, options) |
| return result |