| OLD | NEW |
| --- | --- |
| 1 # Copyright (c) 2013 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import json | 5 import json |
| 6 import math | 6 import math |
| 7 import os |
| 7 | 8 |
| 8 from telemetry.core import util | 9 from telemetry.core import util |
| 9 from telemetry.page import page_measurement | 10 from telemetry.page import page_measurement |
| 11 from telemetry.page import page_set |
| 10 | 12 |
| 11 | 13 |
| 12 def _GeometricMean(values): | 14 def _GeometricMean(values): |
| 13 """Compute a rounded geometric mean from an array of values.""" | 15 """Compute a rounded geometric mean from an array of values.""" |
| 14 if not values: | 16 if not values: |
| 15 return None | 17 return None |
| 16 # Clamp values below 0.001 so math.log never sees a non-positive input. | 18 # Clamp values below 0.001 so math.log never sees a non-positive input. |
| 17 new_values = [] | 19 new_values = [] |
| 18 for value in values: | 20 for value in values: |
| 19 if value > 0.001: | 21 if value > 0.001: |
| 20 new_values.append(value) | 22 new_values.append(value) |
| 21 else: | 23 else: |
| 22 new_values.append(0.001) | 24 new_values.append(0.001) |
| 23 # Compute the sum of the log of the values. | 25 # Compute the sum of the log of the values. |
| 24 log_sum = sum(map(math.log, new_values)) | 26 log_sum = sum(map(math.log, new_values)) |
| 25 # Raise e to the average of the logs (the geometric mean). | 27 # Raise e to the average of the logs (the geometric mean). |
| 26 mean = math.pow(math.e, (log_sum / len(new_values))) | 28 mean = math.pow(math.e, (log_sum / len(new_values))) |
| 27 # Return the rounded mean. | 29 # Return the rounded mean. |
| 28 return int(round(mean)) | 30 return int(round(mean)) |
| 29 | 31 |
| 30 | 32 |
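
The helper above computes the geometric mean in log space: summing logs and exponentiating their average avoids the overflow you would risk by multiplying many large scores directly. A minimal self-contained sketch of the same identity, with hypothetical score values (`geometric_mean` is illustrative, not the benchmark's helper):

```python
import math
from functools import reduce
from operator import mul

def geometric_mean(values, floor=0.001):
  # Clamp to `floor` so math.log never sees a non-positive input,
  # mirroring the 0.001 guard in _GeometricMean above.
  clamped = [max(v, floor) for v in values]
  return math.exp(sum(math.log(v) for v in clamped) / len(clamped))

# The log-sum form equals the n-th root of the product:
scores = [1200.0, 850.0, 2300.0]
direct = reduce(mul, scores) ** (1.0 / len(scores))
assert abs(geometric_mean(scores) - direct) < 1e-6
```

| OLD | NEW |
| --- | --- |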
| 31 SCORE_UNIT = 'score (bigger is better)' | 33 SCORE_UNIT = 'score (bigger is better)' |
| 32 SCORE_TRACE_NAME = 'score' | 34 SCORE_TRACE_NAME = 'score' |
| 33 | 35 |
| 34 | 36 |
| 35 class DomPerf(page_measurement.PageMeasurement): | 37 class DomPerf(page_measurement.PageMeasurement): |
| 38 def CreatePageSet(self, options): |
| 39 BASE_PAGE = 'file:///../../../data/dom_perf/run.html?reportInJS=1&run=' |
| 40 return page_set.PageSet.FromDict({ |
| 41 'pages': [ |
| 42 { 'url': BASE_PAGE + 'Accessors' }, |
| 43 { 'url': BASE_PAGE + 'CloneNodes' }, |
| 44 { 'url': BASE_PAGE + 'CreateNodes' }, |
| 45 { 'url': BASE_PAGE + 'DOMDivWalk' }, |
| 46 { 'url': BASE_PAGE + 'DOMTable' }, |
| 47 { 'url': BASE_PAGE + 'DOMWalk' }, |
| 48 { 'url': BASE_PAGE + 'Events' }, |
| 49 { 'url': BASE_PAGE + 'Get+Elements' }, |
| 50 { 'url': BASE_PAGE + 'GridSort' }, |
| 51 { 'url': BASE_PAGE + 'Template' } |
| 52 ] |
| 53 }, os.path.abspath(__file__)) |
| 54 |
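
Each page added by the new `CreatePageSet` differs only in the `run=` query value, which selects the suite that `run.html` executes. The dict could equally be generated from the suite names; a sketch under that assumption (`_SUITES` and `_BuildPageSetDict` are illustrative helpers, not part of the change):

```python
_SUITES = ['Accessors', 'CloneNodes', 'CreateNodes', 'DOMDivWalk',
           'DOMTable', 'DOMWalk', 'Events', 'Get+Elements', 'GridSort',
           'Template']

def _BuildPageSetDict():
  # Same URLs as the hand-written list above: run.html runs the suite
  # named by the run= parameter and reports its scores back to JS.
  base = 'file:///../../../data/dom_perf/run.html?reportInJS=1&run='
  return {'pages': [{'url': base + suite} for suite in _SUITES]}
```

| OLD | NEW |
| --- | --- |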
| 36 @property | 55 @property |
| 37 def results_are_the_same_on_every_page(self): | 56 def results_are_the_same_on_every_page(self): |
| 38 return False | 57 return False |
| 39 | 58 |
| 40 def MeasurePage(self, page, tab, results): | 59 def MeasurePage(self, page, tab, results): |
| 41 try: | 60 try: |
| 42 def _IsDone(): | 61 def _IsDone(): |
| 43 return tab.GetCookieByName('__domperf_finished') == '1' | 62 return tab.GetCookieByName('__domperf_finished') == '1' |
| 44 util.WaitFor(_IsDone, 600, poll_interval=5) | 63 util.WaitFor(_IsDone, 600, poll_interval=5) |
| 45 | 64 |
| 46 data = json.loads(tab.EvaluateJavaScript('__domperf_result')) | 65 data = json.loads(tab.EvaluateJavaScript('__domperf_result')) |
| 47 for suite in data['BenchmarkSuites']: | 66 for suite in data['BenchmarkSuites']: |
| 48 # Skip benchmarks that we didn't actually run this time around. | 67 # Skip benchmarks that we didn't actually run this time around. |
| 49 if len(suite['Benchmarks']) or suite['score']: | 68 if len(suite['Benchmarks']) or suite['score']: |
| 50 results.Add(SCORE_TRACE_NAME, SCORE_UNIT, | 69 results.Add(SCORE_TRACE_NAME, SCORE_UNIT, |
| 51 suite['score'], suite['name'], 'unimportant') | 70 suite['score'], suite['name'], 'unimportant') |
| 52 finally: | 71 finally: |
| 53 tab.EvaluateJavaScript('document.cookie = "__domperf_finished=0"') | 72 tab.EvaluateJavaScript('document.cookie = "__domperf_finished=0"') |
| 54 | 73 |
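
`MeasurePage` blocks on `util.WaitFor` until the page sets the `__domperf_finished` cookie, polling every 5 seconds with a 600-second timeout. A self-contained sketch of that poll-until-done contract, using `time.sleep` in place of Telemetry's helper (`wait_for` and its exception message are illustrative):

```python
import time

def wait_for(condition, timeout, poll_interval=5):
  # Re-evaluate `condition` every poll_interval seconds; return once it
  # is truthy, or raise if timeout seconds elapse first.
  deadline = time.time() + timeout
  while time.time() < deadline:
    if condition():
      return
    time.sleep(poll_interval)
  raise RuntimeError('Condition not met within %d seconds' % timeout)

# Usage mirroring the benchmark (tab is hypothetical here):
# wait_for(lambda: tab.GetCookieByName('__domperf_finished') == '1', 600)
```

| OLD | NEW |
| --- | --- |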
| 55 def DidRunPageSet(self, tab, results): | 74 def DidRunPageSet(self, tab, results): |
| 56 # Now give the geometric mean as the total for the combined runs. | 75 # Now give the geometric mean as the total for the combined runs. |
| 57 scores = [] | 76 scores = [] |
| 58 for result in results.page_results: | 77 for result in results.page_results: |
| 59 scores.append(result[SCORE_TRACE_NAME].output_value) | 78 scores.append(result[SCORE_TRACE_NAME].output_value) |
| 60 total = _GeometricMean(scores) | 79 total = _GeometricMean(scores) |
| 61 results.AddSummary(SCORE_TRACE_NAME, SCORE_UNIT, total, 'Total') | 80 results.AddSummary(SCORE_TRACE_NAME, SCORE_UNIT, total, 'Total') |
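
`DidRunPageSet` folds the per-page scores into a single 'Total' summary via the geometric mean. A toy stand-in for the results object, only to show how scores collected in `MeasurePage` flow into `AddSummary` (`FakeResults` is hypothetical and far simpler than Telemetry's real results class):

```python
import math

SCORE_UNIT = 'score (bigger is better)'

class FakeResults(object):
  # Toy collector: one score per page, plus a single summary row.
  def __init__(self):
    self.scores = []
    self.summary = None

  def Add(self, trace, unit, value, chart, importance):
    self.scores.append(value)

  def AddSummary(self, trace, unit, value, chart):
    self.summary = (chart, value)

results = FakeResults()
for score in [1200, 850, 2300]:
  results.Add('score', SCORE_UNIT, score, 'suite', 'unimportant')
total = int(round(math.exp(
    sum(map(math.log, results.scores)) / len(results.scores))))
results.AddSummary('score', SCORE_UNIT, total, 'Total')
print(results.summary)  # ('Total', 1329)
```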