OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Basic pyauto performance tests. | 6 """Basic pyauto performance tests. |
7 | 7 |
8 For tests that need to be run for multiple iterations (e.g., so that average | 8 For tests that need to be run for multiple iterations (e.g., so that average |
9 and standard deviation values can be reported), the default number of iterations | 9 and standard deviation values can be reported), the default number of iterations |
10 run for each of these tests is specified by |_DEFAULT_NUM_ITERATIONS|. | 10 run for each of these tests is specified by |_DEFAULT_NUM_ITERATIONS|. |
(...skipping 568 matching lines...) | |
579 timings = [] | 579 timings = [] |
580 for iteration in range(self._num_iterations + 1): | 580 for iteration in range(self._num_iterations + 1): |
581 orig_timeout_count = self._timeout_count | 581 orig_timeout_count = self._timeout_count |
582 elapsed_time = self._MeasureElapsedTime(open_tab_command, | 582 elapsed_time = self._MeasureElapsedTime(open_tab_command, |
583 num_invocations=num_tabs) | 583 num_invocations=num_tabs) |
584 # Only count the timing measurement if no automation call timed out. | 584 # Only count the timing measurement if no automation call timed out. |
585 if self._timeout_count == orig_timeout_count: | 585 if self._timeout_count == orig_timeout_count: |
586 # Ignore the first iteration. | 586 # Ignore the first iteration. |
587 if iteration: | 587 if iteration: |
588 timings.append(elapsed_time) | 588 timings.append(elapsed_time) |
589 logging.info('Iteration %d of %d: %f milliseconds', iteration, | 589 logging.info('Iteration %d of %d: %f milliseconds', iteration + 1, |
590 self._num_iterations, elapsed_time) | 590 self._num_iterations, elapsed_time) |
591 self.assertTrue(self._timeout_count <= self._max_timeout_count, | 591 self.assertTrue(self._timeout_count <= self._max_timeout_count, |
592 msg='Test exceeded automation timeout threshold.') | 592 msg='Test exceeded automation timeout threshold.') |
593 self.assertEqual(1 + num_tabs, self.GetTabCount(), | 593 self.assertEqual(1 + num_tabs, self.GetTabCount(), |
594 msg='Did not open %d new tab(s).' % num_tabs) | 594 msg='Did not open %d new tab(s).' % num_tabs) |
595 for _ in range(num_tabs): | 595 for _ in range(num_tabs): |
596 self.CloseTab(tab_index=1) | 596 self.CloseTab(tab_index=1) |
597 | 597 |
598 self._PrintSummaryResults(description, timings, 'milliseconds', graph_name) | 598 self._PrintSummaryResults(description, timings, 'milliseconds', graph_name) |
599 | 599 |
(...skipping 141 matching lines...) | |
741 self.CloseTab(tab_index=1) | 741 self.CloseTab(tab_index=1) |
742 return result_dict | 742 return result_dict |
743 | 743 |
744 timings = {} | 744 timings = {} |
745 for iteration in xrange(self._num_iterations + 1): | 745 for iteration in xrange(self._num_iterations + 1): |
746 result_dict = _RunBenchmarkOnce(url) | 746 result_dict = _RunBenchmarkOnce(url) |
747 # Ignore the first iteration. | 747 # Ignore the first iteration. |
748 if iteration: | 748 if iteration: |
749 for key, val in result_dict.items(): | 749 for key, val in result_dict.items(): |
750 timings.setdefault(key, []).append(val) | 750 timings.setdefault(key, []).append(val) |
751 logging.info('Iteration %d of %d:\n%s', iteration, self._num_iterations, | 751 logging.info('Iteration %d of %d:\n%s', iteration + 1, |
752 self.pformat(result_dict)) | 752 self._num_iterations, self.pformat(result_dict)) |
753 | 753 |
754 for key, val in timings.items(): | 754 for key, val in timings.items(): |
755 if key == 'final_score': | 755 if key == 'final_score': |
756 self._PrintSummaryResults('V8Benchmark', val, 'score', | 756 self._PrintSummaryResults('V8Benchmark', val, 'score', |
757 'v8_benchmark_final') | 757 'v8_benchmark_final') |
758 else: | 758 else: |
759 self._PrintSummaryResults('V8Benchmark-%s' % key, val, 'score', | 759 self._PrintSummaryResults('V8Benchmark-%s' % key, val, 'score', |
760 'v8_benchmark_individual') | 760 'v8_benchmark_individual') |
761 | 761 |
762 def testSunSpider(self): | 762 def testSunSpider(self): |
(...skipping 807 matching lines...) | |
1570 self.DownloadAndWaitForStart(url) | 1570 self.DownloadAndWaitForStart(url) |
1571 self.WaitForAllDownloadsToComplete(timeout=2 * 60 * 1000) # 2 minutes. | 1571 self.WaitForAllDownloadsToComplete(timeout=2 * 60 * 1000) # 2 minutes. |
1572 | 1572 |
1573 timings = [] | 1573 timings = [] |
1574 for iteration in range(self._num_iterations + 1): | 1574 for iteration in range(self._num_iterations + 1): |
1575 elapsed_time = self._MeasureElapsedTime( | 1575 elapsed_time = self._MeasureElapsedTime( |
1576 lambda: _DownloadFile(DOWNLOAD_100MB_URL), num_invocations=1) | 1576 lambda: _DownloadFile(DOWNLOAD_100MB_URL), num_invocations=1) |
1577 # Ignore the first iteration. | 1577 # Ignore the first iteration. |
1578 if iteration: | 1578 if iteration: |
1579 timings.append(elapsed_time) | 1579 timings.append(elapsed_time) |
1580 logging.info('Iteration %d of %d: %f milliseconds', iteration, | 1580 logging.info('Iteration %d of %d: %f milliseconds', iteration + 1, |
1581 self._num_iterations, elapsed_time) | 1581 self._num_iterations, elapsed_time) |
1582 self.SetDownloadShelfVisible(False) | 1582 self.SetDownloadShelfVisible(False) |
1583 _CleanupAdditionalFilesInDir(download_dir, orig_downloads) | 1583 _CleanupAdditionalFilesInDir(download_dir, orig_downloads) |
1584 | 1584 |
1585 self._PrintSummaryResults('Download100MBFile', timings, 'milliseconds', | 1585 self._PrintSummaryResults('Download100MBFile', timings, 'milliseconds', |
1586 'download_file') | 1586 'download_file') |
1587 | 1587 |
1588 # Tell the local server to delete the 100 MB file. | 1588 # Tell the local server to delete the 100 MB file. |
1589 self.NavigateToURL(DELETE_100MB_URL) | 1589 self.NavigateToURL(DELETE_100MB_URL) |
1590 | 1590 |
(...skipping 23 matching lines...) | |
1614 self.WaitUntil(_IsUploadComplete, timeout=120, expect_retval=True, | 1614 self.WaitUntil(_IsUploadComplete, timeout=120, expect_retval=True, |
1615 retry_sleep=0.10), | 1615 retry_sleep=0.10), |
1616 msg='Upload failed to complete before the timeout was hit.') | 1616 msg='Upload failed to complete before the timeout was hit.') |
1617 | 1617 |
1618 timings = [] | 1618 timings = [] |
1619 for iteration in range(self._num_iterations + 1): | 1619 for iteration in range(self._num_iterations + 1): |
1620 elapsed_time = self._MeasureElapsedTime(_RunSingleUpload) | 1620 elapsed_time = self._MeasureElapsedTime(_RunSingleUpload) |
1621 # Ignore the first iteration. | 1621 # Ignore the first iteration. |
1622 if iteration: | 1622 if iteration: |
1623 timings.append(elapsed_time) | 1623 timings.append(elapsed_time) |
1624 logging.info('Iteration %d of %d: %f milliseconds', iteration, | 1624 logging.info('Iteration %d of %d: %f milliseconds', iteration + 1, |
1625 self._num_iterations, elapsed_time) | 1625 self._num_iterations, elapsed_time) |
1626 | 1626 |
1627 self._PrintSummaryResults('Upload50MBFile', timings, 'milliseconds', | 1627 self._PrintSummaryResults('Upload50MBFile', timings, 'milliseconds', |
1628 'upload_file') | 1628 'upload_file') |
1629 | 1629 |
1630 | 1630 |
1631 class FrameTimes(object): | |
1632 """Container for a list of frame times.""" | |
1633 | |
1634 def __init__(self, frame_times): | |
1635 self._frame_times = frame_times | |
1636 | |
1637 def GetFps(self): | |
1638 if not self._frame_times: | |
1639 return 0 | |
1640 avg = sum(self._frame_times) / len(self._frame_times) | |
1641 if not avg: | |
1642 return 0 | |
1643 return int(1000.0 / avg) | |
1644 | |
1645 def GetMeanFrameTime(self): | |
1646 return Mean(self._frame_times) | |
1647 | |
1648 def GetPercentBelow60Fps(self): | |
1649 if not self._frame_times: | |
1650 return 0 | |
1651 threshold = math.ceil(1000 / 60.) | |
1652 num_frames_below_60 = len([t for t in self._frame_times if t > threshold]) | |
1653 num_frames = len(self._frame_times) | |
1654 return (100. * num_frames_below_60) / num_frames | |
1655 | |
1656 | |
1657 class ScrollResults(object): | 1631 class ScrollResults(object): |
1658 """Container for ScrollTest results.""" | 1632 """Container for ScrollTest results.""" |
1659 | 1633 |
1660 def __init__(self, first_paint_seconds, frame_times_lists): | 1634 def __init__(self, first_paint_seconds, results_list): |
1661 assert len(frame_times_lists) == 2, 'Expecting initial and repeat times' | 1635 assert len(results_list) == 2, 'Expecting initial and repeat results.' |
1662 self.first_paint_time = 1000.0 * first_paint_seconds | 1636 self._first_paint_time = 1000.0 * first_paint_seconds |
1663 self.initial_frame_times = FrameTimes(frame_times_lists[0]) | 1637 self._results_list = results_list |
1664 self.repeat_frame_times = FrameTimes(frame_times_lists[1]) | 1638 |
1639 def GetFirstPaintTime(self): | |
1640 return self._first_paint_time | |
1641 | |
1642 def GetFrameCount(self, index): | |
1643 results = self._results_list[index] | |
1644 return results.get('numFramesSentToScreen', results['numAnimationFrames']) | |
1645 | |
1646 def GetFps(self, index): | |
1647 return (self.GetFrameCount(index) / | |
1648 self._results_list[index]['totalTimeInSeconds']) | |
1649 | |
1650 def GetMeanFrameTime(self, index): | |
1651 return (self._results_list[index]['totalTimeInSeconds'] / | |
1652 self.GetFrameCount(index)) | |
1653 | |
1654 def GetPercentBelow60Fps(self, index): | |
1655 return (float(self._results_list[index]['droppedFrameCount']) / | |
1656 self.GetFrameCount(index)) | |
1665 | 1657 |
1666 | 1658 |
1667 class BaseScrollTest(BasePerfTest): | 1659 class BaseScrollTest(BasePerfTest): |
1668 """Base class for tests measuring scrolling performance.""" | 1660 """Base class for tests measuring scrolling performance.""" |
1669 | 1661 |
1670 def setUp(self): | 1662 def setUp(self): |
1671 """Performs necessary setup work before running each test.""" | 1663 """Performs necessary setup work before running each test.""" |
1672 super(BaseScrollTest, self).setUp() | 1664 super(BaseScrollTest, self).setUp() |
1673 scroll_file = os.path.join(self.DataDir(), 'scroll', 'scroll.js') | 1665 scroll_file = os.path.join(self.DataDir(), 'scroll', 'scroll.js') |
1674 with open(scroll_file) as f: | 1666 with open(scroll_file) as f: |
1675 self._scroll_text = f.read() | 1667 self._scroll_text = f.read() |
1676 | 1668 |
1669 def ExtraChromeFlags(self): | |
1670 """Ensures Chrome is launched with custom flags. | |
1671 | |
1672 Returns: | |
1673 A list of extra flags to pass to Chrome when it is launched. | |
1674 """ | |
1675 # Extra flag used by scroll performance tests. | |
1676 return (super(BaseScrollTest, self).ExtraChromeFlags() + | |
1677 ['--enable-gpu-benchmarking']) | |
1678 | |
1677 def RunSingleInvocation(self, url, setup_js=''): | 1679 def RunSingleInvocation(self, url, setup_js=''): |
1678 """Runs a single invocation of the scroll test. | 1680 """Runs a single invocation of the scroll test. |
1679 | 1681 |
1680 Args: | 1682 Args: |
1681 url: The string url for the webpage on which to run the scroll test. | 1683 url: The string url for the webpage on which to run the scroll test. |
1682 setup_js: String representing additional Javascript setup code to execute | 1684 setup_js: String representing additional Javascript setup code to execute |
1683 in the webpage immediately before running the scroll test. | 1685 in the webpage immediately before running the scroll test. |
1684 | 1686 |
1685 Returns: | 1687 Returns: |
1686 Instance of ScrollResults. | 1688 Instance of ScrollResults. |
1687 """ | 1689 """ |
1690 | |
1688 self.assertTrue(self.AppendTab(pyauto.GURL(url)), | 1691 self.assertTrue(self.AppendTab(pyauto.GURL(url)), |
1689 msg='Failed to append tab for webpage.') | 1692 msg='Failed to append tab for webpage.') |
1690 | 1693 |
1691 js = """ | 1694 timeout = pyauto.PyUITest.ActionTimeoutChanger(self, 300 * 1000) # ms |
1692 %s | 1695 test_js = '%s; %s; new ScrollTest();' % (self._scroll_text, setup_js) |
1693 %s | 1696 results = simplejson.loads(self.ExecuteJavascript(test_js, tab_index=1)) |
1694 __scroll_test(); | |
1695 window.domAutomationController.send('done'); | |
1696 """ % (self._scroll_text, setup_js) | |
1697 self.ExecuteJavascript(js, tab_index=1) | |
1698 | 1697 |
1699 # Poll the webpage until the test is complete. | 1698 first_paint_js = ('window.domAutomationController.send(' |
1700 def IsTestComplete(): | 1699 '(chrome.loadTimes().firstPaintTime - ' |
1701 done_js = """ | 1700 'chrome.loadTimes().requestTime).toString());') |
1702 if (__scrolling_complete) | 1701 first_paint_time = float(self.ExecuteJavascript(first_paint_js, |
1703 window.domAutomationController.send('complete'); | 1702 tab_index=1)) |
1704 else | |
1705 window.domAutomationController.send('incomplete'); | |
1706 """ | |
1707 return self.ExecuteJavascript(done_js, tab_index=1) == 'complete' | |
1708 | 1703 |
1709 self.assertTrue( | 1704 self.CloseTab(tab_index=1) |
1710 self.WaitUntil(IsTestComplete, timeout=300, expect_retval=True, | |
1711 retry_sleep=1), | |
1712 msg='Timed out when waiting for scrolling tests to complete.') | |
1713 | 1705 |
1714 # Get the scroll test results from the webpage. | 1706 return ScrollResults(first_paint_time, results) |
1715 results_js = """ | |
1716 var __stringify = JSON.stringify || JSON.encode; | |
1717 window.domAutomationController.send(__stringify({ | |
1718 'first_paint_time': chrome.loadTimes().firstPaintTime - | |
1719 chrome.loadTimes().requestTime, | |
1720 'frame_times': __frame_times, | |
1721 })); | |
1722 """ | |
1723 results = eval(self.ExecuteJavascript(results_js, tab_index=1)) | |
1724 self.CloseTab(tab_index=1) | |
1725 return ScrollResults(results['first_paint_time'], results['frame_times']) | |
1726 | 1707 |
1727 def RunScrollTest(self, url, description, graph_name, setup_js=''): | 1708 def RunScrollTest(self, url, description, graph_name, setup_js=''): |
1728 """Runs a scroll performance test on the specified webpage. | 1709 """Runs a scroll performance test on the specified webpage. |
1729 | 1710 |
1730 Args: | 1711 Args: |
1731 url: The string url for the webpage on which to run the scroll test. | 1712 url: The string url for the webpage on which to run the scroll test. |
1732 description: A string description for the particular test being run. | 1713 description: A string description for the particular test being run. |
1733 graph_name: A string name for the performance graph associated with this | 1714 graph_name: A string name for the performance graph associated with this |
1734 test. Only used on Chrome desktop. | 1715 test. Only used on Chrome desktop. |
1735 setup_js: String representing additional Javascript setup code to execute | 1716 setup_js: String representing additional Javascript setup code to execute |
1736 in the webpage immediately before running the scroll test. | 1717 in the webpage immediately before running the scroll test. |
1737 """ | 1718 """ |
1738 results = [] | 1719 results = [] |
1739 for iteration in range(self._num_iterations + 1): | 1720 for iteration in range(self._num_iterations + 1): |
1740 result = self.RunSingleInvocation(url, setup_js) | 1721 result = self.RunSingleInvocation(url, setup_js) |
1741 # Ignore the first iteration. | 1722 # Ignore the first iteration. |
1742 if iteration: | 1723 if iteration: |
1743 fps = result.repeat_frame_times.GetFps() | 1724 fps = result.GetFps(1) |
1744 assert fps, '%s did not scroll' % url | 1725 assert fps, '%s did not scroll' % url |
1745 logging.info('Iteration %d of %d: %f fps', iteration, | 1726 logging.info('Iteration %d of %d: %f fps', iteration + 1, |
1746 self._num_iterations, fps) | 1727 self._num_iterations, fps) |
1747 results.append(result) | 1728 results.append(result) |
1748 self._PrintSummaryResults( | 1729 self._PrintSummaryResults( |
1749 description, [r.repeat_frame_times.GetFps() for r in results], | 1730 description, [r.GetFps(1) for r in results], |
1750 'FPS', graph_name) | 1731 'FPS', graph_name) |
1751 | 1732 |
1752 | 1733 |
1753 class PopularSitesScrollTest(BaseScrollTest): | 1734 class PopularSitesScrollTest(BaseScrollTest): |
1754 """Measures scrolling performance on recorded versions of popular sites.""" | 1735 """Measures scrolling performance on recorded versions of popular sites.""" |
1755 | 1736 |
1756 def ExtraChromeFlags(self): | 1737 def ExtraChromeFlags(self): |
1757 """Ensures Chrome is launched with custom flags. | 1738 """Ensures Chrome is launched with custom flags. |
1758 | 1739 |
1759 Returns: | 1740 Returns: |
1760 A list of extra flags to pass to Chrome when it is launched. | 1741 A list of extra flags to pass to Chrome when it is launched. |
1761 """ | 1742 """ |
1762 return super(PopularSitesScrollTest, | 1743 return super(PopularSitesScrollTest, |
1763 self).ExtraChromeFlags() + PageCyclerReplay.CHROME_FLAGS | 1744 self).ExtraChromeFlags() + PageCyclerReplay.CHROME_FLAGS |
1764 | 1745 |
1765 def _GetUrlList(self, test_name): | 1746 def _GetUrlList(self, test_name): |
1766 """Returns list of recorded sites.""" | 1747 """Returns list of recorded sites.""" |
1767 sites_path = PageCyclerReplay.Path('page_sets', test_name=test_name) | 1748 sites_path = PageCyclerReplay.Path('page_sets', test_name=test_name) |
1768 with open(sites_path) as f: | 1749 with open(sites_path) as f: |
1769 sites_text = f.read() | 1750 sites_text = f.read() |
1770 js = """ | 1751 js = """ |
1771 %s | 1752 %s |
1772 window.domAutomationController.send(JSON.stringify(pageSets)); | 1753 window.domAutomationController.send(JSON.stringify(pageSets)); |
1773 """ % sites_text | 1754 """ % sites_text |
1774 page_sets = eval(self.ExecuteJavascript(js)) | 1755 page_sets = eval(self.ExecuteJavascript(js)) |
1775 return list(itertools.chain(*page_sets))[1:] # Skip first. | 1756 return list(itertools.chain(*page_sets))[1:] # Skip first. |
1776 | 1757 |
1777 def _PrintScrollResults(self, results): | 1758 def _PrintScrollResults(self, results): |
1778 self._PrintSummaryResults( | 1759 self._PrintSummaryResults( |
1779 'initial', [r.initial_frame_times.GetMeanFrameTime() for r in results], | 1760 'initial', [r.GetMeanFrameTime(0) for r in results], |
1780 'ms', 'FrameTimes') | 1761 'ms', 'FrameTimes') |
1781 self._PrintSummaryResults( | 1762 self._PrintSummaryResults( |
1782 'repeat', [r.repeat_frame_times.GetMeanFrameTime() for r in results], | 1763 'repeat', [r.GetMeanFrameTime(1) for r in results], |
1783 'ms', 'FrameTimes') | 1764 'ms', 'FrameTimes') |
1784 self._PrintSummaryResults( | 1765 self._PrintSummaryResults( |
1785 'initial', | 1766 'initial', |
1786 [r.initial_frame_times.GetPercentBelow60Fps() for r in results], | 1767 [r.GetPercentBelow60Fps(0) for r in results], |
1787 'percent', 'PercentBelow60FPS') | 1768 'percent', 'PercentBelow60FPS') |
1788 self._PrintSummaryResults( | 1769 self._PrintSummaryResults( |
1789 'repeat', | 1770 'repeat', |
1790 [r.repeat_frame_times.GetPercentBelow60Fps() for r in results], | 1771 [r.GetPercentBelow60Fps(1) for r in results], |
1791 'percent', 'PercentBelow60FPS') | 1772 'percent', 'PercentBelow60FPS') |
1792 self._PrintSummaryResults( | 1773 self._PrintSummaryResults( |
1793 'first_paint_time', [r.first_paint_time for r in results], | 1774 'first_paint_time', [r.GetFirstPaintTime() for r in results], |
1794 'ms', 'FirstPaintTime') | 1775 'ms', 'FirstPaintTime') |
1795 | 1776 |
1796 def test2012Q3(self): | 1777 def test2012Q3(self): |
1797 test_name = '2012Q3' | 1778 test_name = '2012Q3' |
1798 urls = self._GetUrlList(test_name) | 1779 urls = self._GetUrlList(test_name) |
1799 results = [] | 1780 results = [] |
1800 with PageCyclerReplay.ReplayServer(test_name) as replay_server: | 1781 with PageCyclerReplay.ReplayServer(test_name) as replay_server: |
1801 if replay_server.is_record_mode: | 1782 if replay_server.is_record_mode: |
1802 self._num_iterations = 1 | 1783 self._num_iterations = 1 |
1803 for iteration in range(self._num_iterations): | 1784 for iteration in range(self._num_iterations): |
1804 for url in urls: | 1785 for url in urls: |
1805 result = self.RunSingleInvocation(url) | 1786 result = self.RunSingleInvocation(url) |
1806 fps = result.initial_frame_times.GetFps() | 1787 fps = result.GetFps(0) |
1807 assert fps, '%s did not scroll' % url | 1788 assert fps, '%s did not scroll' % url |
1808 logging.info('Iteration %d of %d: %f fps', iteration, | 1789 logging.info('Iteration %d of %d: %f fps', iteration + 1, |
1809 self._num_iterations, fps) | 1790 self._num_iterations, fps) |
1810 results.append(result) | 1791 results.append(result) |
1811 self._PrintScrollResults(results) | 1792 self._PrintScrollResults(results) |
1812 | 1793 |
1813 | 1794 |
1814 class ScrollTest(BaseScrollTest): | 1795 class ScrollTest(BaseScrollTest): |
1815 """Tests to measure scrolling performance.""" | 1796 """Tests to measure scrolling performance.""" |
1816 | 1797 |
1817 def ExtraChromeFlags(self): | 1798 def ExtraChromeFlags(self): |
1818 """Ensures Chrome is launched with custom flags. | 1799 """Ensures Chrome is launched with custom flags. |
(...skipping 19 matching lines...) | |
1838 def testGooglePlusScroll(self): | 1819 def testGooglePlusScroll(self): |
1839 """Runs the scroll test on a Google Plus anonymized page.""" | 1820 """Runs the scroll test on a Google Plus anonymized page.""" |
1840 self.RunScrollTest( | 1821 self.RunScrollTest( |
1841 self.GetFileURLForDataPath('scroll', 'plus.html'), | 1822 self.GetFileURLForDataPath('scroll', 'plus.html'), |
1842 'ScrollGooglePlusPage', 'scroll_fps') | 1823 'ScrollGooglePlusPage', 'scroll_fps') |
1843 | 1824 |
1844 def testGmailScroll(self): | 1825 def testGmailScroll(self): |
1845 """Runs the scroll test using the live Gmail site.""" | 1826 """Runs the scroll test using the live Gmail site.""" |
1846 self._LoginToGoogleAccount(account_key='test_google_account_gmail') | 1827 self._LoginToGoogleAccount(account_key='test_google_account_gmail') |
1847 self.RunScrollTest('http://www.gmail.com', 'ScrollGmail', 'scroll_fps', | 1828 self.RunScrollTest('http://www.gmail.com', 'ScrollGmail', 'scroll_fps', |
1848 setup_js='__is_gmail_test = true;') | 1829 setup_js='__isGmailTest = true;') |
1849 | 1830 |
1850 | 1831 |
1851 class FlashTest(BasePerfTest): | 1832 class FlashTest(BasePerfTest): |
1852 """Tests to measure flash performance.""" | 1833 """Tests to measure flash performance.""" |
1853 | 1834 |
1854 def _RunFlashTestForAverageFPS(self, webpage_url, description, graph_name): | 1835 def _RunFlashTestForAverageFPS(self, webpage_url, description, graph_name): |
1855 """Runs a single flash test that measures an average FPS value. | 1836 """Runs a single flash test that measures an average FPS value. |
1856 | 1837 |
1857 Args: | 1838 Args: |
1858 webpage_url: The string URL to a webpage that will run the test. | 1839 webpage_url: The string URL to a webpage that will run the test. |
(...skipping 356 matching lines...) | |
2215 'start_page': 'src/tools/page_cycler/webpagereplay/start.html', | 2196 'start_page': 'src/tools/page_cycler/webpagereplay/start.html', |
2216 'extension': 'src/tools/page_cycler/webpagereplay/extension', | 2197 'extension': 'src/tools/page_cycler/webpagereplay/extension', |
2217 } | 2198 } |
2218 | 2199 |
2219 CHROME_FLAGS = webpagereplay.CHROME_FLAGS + [ | 2200 CHROME_FLAGS = webpagereplay.CHROME_FLAGS + [ |
2220 '--log-level=0', | 2201 '--log-level=0', |
2221 '--disable-background-networking', | 2202 '--disable-background-networking', |
2222 '--enable-experimental-extension-apis', | 2203 '--enable-experimental-extension-apis', |
2223 '--enable-logging', | 2204 '--enable-logging', |
2224 '--enable-benchmarking', | 2205 '--enable-benchmarking', |
2206 '--enable-stats-table', | |
nduca 2012/08/14 00:24:34: any reason this is added here?
dtu 2012/08/16 02:31:10: Chrome DCHECKS for --enable-stats-table if --enabl
2225 '--metrics-recording-only', | 2207 '--metrics-recording-only', |
2226 '--activate-on-launch', | 2208 '--activate-on-launch', |
2227 '--no-first-run', | 2209 '--no-first-run', |
2228 '--no-proxy-server', | 2210 '--no-proxy-server', |
2229 ] | 2211 ] |
2230 | 2212 |
2231 @classmethod | 2213 @classmethod |
2232 def Path(cls, key, **kwargs): | 2214 def Path(cls, key, **kwargs): |
2233 return FormatChromePath(cls._PATHS[key], **kwargs) | 2215 return FormatChromePath(cls._PATHS[key], **kwargs) |
2234 | 2216 |
(...skipping 376 matching lines...) | |
2611 """Identifies the port number to which the server is currently bound. | 2593 """Identifies the port number to which the server is currently bound. |
2612 | 2594 |
2613 Returns: | 2595 Returns: |
2614 The numeric port number to which the server is currently bound. | 2596 The numeric port number to which the server is currently bound. |
2615 """ | 2597 """ |
2616 return self._server.server_address[1] | 2598 return self._server.server_address[1] |
2617 | 2599 |
2618 | 2600 |
2619 if __name__ == '__main__': | 2601 if __name__ == '__main__': |
2620 pyauto_functional.Main() | 2602 pyauto_functional.Main() |