OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Basic pyauto performance tests. | 6 """Basic pyauto performance tests. |
7 | 7 |
8 For tests that need to be run for multiple iterations (e.g., so that average | 8 For tests that need to be run for multiple iterations (e.g., so that average |
9 and standard deviation values can be reported), the default number of iterations | 9 and standard deviation values can be reported), the default number of iterations |
10 run for each of these tests is specified by |_DEFAULT_NUM_ITERATIONS|. | 10 run for each of these tests is specified by |_DEFAULT_NUM_ITERATIONS|. |
(...skipping 568 matching lines...)
579 timings = [] | 579 timings = [] |
580 for iteration in range(self._num_iterations + 1): | 580 for iteration in range(self._num_iterations + 1): |
581 orig_timeout_count = self._timeout_count | 581 orig_timeout_count = self._timeout_count |
582 elapsed_time = self._MeasureElapsedTime(open_tab_command, | 582 elapsed_time = self._MeasureElapsedTime(open_tab_command, |
583 num_invocations=num_tabs) | 583 num_invocations=num_tabs) |
584 # Only count the timing measurement if no automation call timed out. | 584 # Only count the timing measurement if no automation call timed out. |
585 if self._timeout_count == orig_timeout_count: | 585 if self._timeout_count == orig_timeout_count: |
586 # Ignore the first iteration. | 586 # Ignore the first iteration. |
587 if iteration: | 587 if iteration: |
588 timings.append(elapsed_time) | 588 timings.append(elapsed_time) |
589 logging.info('Iteration %d of %d: %f milliseconds', iteration, | 589 logging.info('Iteration %d of %d: %f milliseconds', iteration + 1, |
dennis_jeffrey 2012/08/16 17:50:59:
What is the reason for adding the "+1" to each of these logging statements?
dtu 2012/08/16 19:12:39:
Done. Sorry, you're right. The only loop that was
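Note on the thread above: each of these timing loops runs _num_iterations + 1 passes and discards the first as a warm-up, so inside the "if iteration:" branch the counter already runs from 1 to _num_iterations and needs no "+1" for display; only the loop without a warm-up pass (in test2012Q3 below) is 0-based. A minimal standalone sketch of the warm-up pattern, where the names run_with_warmup and measure_once are illustrative and not part of this CL:

    import logging
    import time

    def run_with_warmup(measure_once, num_iterations):
      """Times num_iterations + 1 passes of measure_once, dropping the first.

      The extra pass warms caches; the recorded passes are numbered 1 through
      num_iterations, so the loop counter is already the display number.
      """
      timings = []
      for iteration in range(num_iterations + 1):
        start = time.time()
        measure_once()
        elapsed_ms = (time.time() - start) * 1000
        if iteration:  # Iteration 0 is the warm-up; do not record it.
          timings.append(elapsed_ms)
          logging.info('Iteration %d of %d: %f milliseconds',
                       iteration, num_iterations, elapsed_ms)
      return timings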
590 self._num_iterations, elapsed_time) | 590 self._num_iterations, elapsed_time) |
591 self.assertTrue(self._timeout_count <= self._max_timeout_count, | 591 self.assertTrue(self._timeout_count <= self._max_timeout_count, |
592 msg='Test exceeded automation timeout threshold.') | 592 msg='Test exceeded automation timeout threshold.') |
593 self.assertEqual(1 + num_tabs, self.GetTabCount(), | 593 self.assertEqual(1 + num_tabs, self.GetTabCount(), |
594 msg='Did not open %d new tab(s).' % num_tabs) | 594 msg='Did not open %d new tab(s).' % num_tabs) |
595 for _ in range(num_tabs): | 595 for _ in range(num_tabs): |
596 self.CloseTab(tab_index=1) | 596 self.CloseTab(tab_index=1) |
597 | 597 |
598 self._PrintSummaryResults(description, timings, 'milliseconds', graph_name) | 598 self._PrintSummaryResults(description, timings, 'milliseconds', graph_name) |
599 | 599 |
(...skipping 141 matching lines...)
741 self.CloseTab(tab_index=1) | 741 self.CloseTab(tab_index=1) |
742 return result_dict | 742 return result_dict |
743 | 743 |
744 timings = {} | 744 timings = {} |
745 for iteration in xrange(self._num_iterations + 1): | 745 for iteration in xrange(self._num_iterations + 1): |
746 result_dict = _RunBenchmarkOnce(url) | 746 result_dict = _RunBenchmarkOnce(url) |
747 # Ignore the first iteration. | 747 # Ignore the first iteration. |
748 if iteration: | 748 if iteration: |
749 for key, val in result_dict.items(): | 749 for key, val in result_dict.items(): |
750 timings.setdefault(key, []).append(val) | 750 timings.setdefault(key, []).append(val) |
751 logging.info('Iteration %d of %d:\n%s', iteration, self._num_iterations, | 751 logging.info('Iteration %d of %d:\n%s', iteration + 1, |
752 self.pformat(result_dict)) | 752 self._num_iterations, self.pformat(result_dict)) |
753 | 753 |
754 for key, val in timings.items(): | 754 for key, val in timings.items(): |
755 if key == 'final_score': | 755 if key == 'final_score': |
756 self._PrintSummaryResults('V8Benchmark', val, 'score', | 756 self._PrintSummaryResults('V8Benchmark', val, 'score', |
757 'v8_benchmark_final') | 757 'v8_benchmark_final') |
758 else: | 758 else: |
759 self._PrintSummaryResults('V8Benchmark-%s' % key, val, 'score', | 759 self._PrintSummaryResults('V8Benchmark-%s' % key, val, 'score', |
760 'v8_benchmark_individual') | 760 'v8_benchmark_individual') |
761 | 761 |
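Note: the timings.setdefault(key, []).append(val) line above folds each per-iteration result dict into one list per metric, which _PrintSummaryResults then summarizes. A tiny sketch with invented benchmark keys and scores:

    runs = [{'Crypto': 20100, 'final_score': 9000},   # Iteration 1 results.
            {'Crypto': 20400, 'final_score': 9100}]   # Iteration 2 results.
    timings = {}
    for result_dict in runs:
      for key, val in result_dict.items():
        timings.setdefault(key, []).append(val)
    # timings == {'Crypto': [20100, 20400], 'final_score': [9000, 9100]}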
762 def testSunSpider(self): | 762 def testSunSpider(self): |
(...skipping 807 matching lines...)
1570 self.DownloadAndWaitForStart(url) | 1570 self.DownloadAndWaitForStart(url) |
1571 self.WaitForAllDownloadsToComplete(timeout=2 * 60 * 1000) # 2 minutes. | 1571 self.WaitForAllDownloadsToComplete(timeout=2 * 60 * 1000) # 2 minutes. |
1572 | 1572 |
1573 timings = [] | 1573 timings = [] |
1574 for iteration in range(self._num_iterations + 1): | 1574 for iteration in range(self._num_iterations + 1): |
1575 elapsed_time = self._MeasureElapsedTime( | 1575 elapsed_time = self._MeasureElapsedTime( |
1576 lambda: _DownloadFile(DOWNLOAD_100MB_URL), num_invocations=1) | 1576 lambda: _DownloadFile(DOWNLOAD_100MB_URL), num_invocations=1) |
1577 # Ignore the first iteration. | 1577 # Ignore the first iteration. |
1578 if iteration: | 1578 if iteration: |
1579 timings.append(elapsed_time) | 1579 timings.append(elapsed_time) |
1580 logging.info('Iteration %d of %d: %f milliseconds', iteration, | 1580 logging.info('Iteration %d of %d: %f milliseconds', iteration + 1, |
1581 self._num_iterations, elapsed_time) | 1581 self._num_iterations, elapsed_time) |
1582 self.SetDownloadShelfVisible(False) | 1582 self.SetDownloadShelfVisible(False) |
1583 _CleanupAdditionalFilesInDir(download_dir, orig_downloads) | 1583 _CleanupAdditionalFilesInDir(download_dir, orig_downloads) |
1584 | 1584 |
1585 self._PrintSummaryResults('Download100MBFile', timings, 'milliseconds', | 1585 self._PrintSummaryResults('Download100MBFile', timings, 'milliseconds', |
1586 'download_file') | 1586 'download_file') |
1587 | 1587 |
1588 # Tell the local server to delete the 100 MB file. | 1588 # Tell the local server to delete the 100 MB file. |
1589 self.NavigateToURL(DELETE_100MB_URL) | 1589 self.NavigateToURL(DELETE_100MB_URL) |
1590 | 1590 |
(...skipping 23 matching lines...)
1614 self.WaitUntil(_IsUploadComplete, timeout=120, expect_retval=True, | 1614 self.WaitUntil(_IsUploadComplete, timeout=120, expect_retval=True, |
1615 retry_sleep=0.10), | 1615 retry_sleep=0.10), |
1616 msg='Upload failed to complete before the timeout was hit.') | 1616 msg='Upload failed to complete before the timeout was hit.') |
1617 | 1617 |
1618 timings = [] | 1618 timings = [] |
1619 for iteration in range(self._num_iterations + 1): | 1619 for iteration in range(self._num_iterations + 1): |
1620 elapsed_time = self._MeasureElapsedTime(_RunSingleUpload) | 1620 elapsed_time = self._MeasureElapsedTime(_RunSingleUpload) |
1621 # Ignore the first iteration. | 1621 # Ignore the first iteration. |
1622 if iteration: | 1622 if iteration: |
1623 timings.append(elapsed_time) | 1623 timings.append(elapsed_time) |
1624 logging.info('Iteration %d of %d: %f milliseconds', iteration, | 1624 logging.info('Iteration %d of %d: %f milliseconds', iteration + 1, |
1625 self._num_iterations, elapsed_time) | 1625 self._num_iterations, elapsed_time) |
1626 | 1626 |
1627 self._PrintSummaryResults('Upload50MBFile', timings, 'milliseconds', | 1627 self._PrintSummaryResults('Upload50MBFile', timings, 'milliseconds', |
1628 'upload_file') | 1628 'upload_file') |
1629 | 1629 |
1630 | 1630 |
1631 class FrameTimes(object): | |
1632 """Container for a list of frame times.""" | |
1633 | |
1634 def __init__(self, frame_times): | |
1635 self._frame_times = frame_times | |
1636 | |
1637 def GetFps(self): | |
1638 if not self._frame_times: | |
1639 return 0 | |
1640 avg = sum(self._frame_times) / len(self._frame_times) | |
1641 if not avg: | |
1642 return 0 | |
1643 return int(1000.0 / avg) | |
1644 | |
1645 def GetMeanFrameTime(self): | |
1646 return Mean(self._frame_times) | |
1647 | |
1648 def GetPercentBelow60Fps(self): | |
1649 if not self._frame_times: | |
1650 return 0 | |
1651 threshold = math.ceil(1000 / 60.) | |
1652 num_frames_below_60 = len([t for t in self._frame_times if t > threshold]) | |
1653 num_frames = len(self._frame_times) | |
1654 return (100. * num_frames_below_60) / num_frames | |
1655 | |
1656 | |
1657 class ScrollResults(object): | 1631 class ScrollResults(object): |
1658 """Container for ScrollTest results.""" | 1632 """Container for ScrollTest results.""" |
1659 | 1633 |
1660 def __init__(self, first_paint_seconds, frame_times_lists): | 1634 def __init__(self, first_paint_seconds, results_list): |
1661 assert len(frame_times_lists) == 2, 'Expecting initial and repeat times' | 1635 assert len(results_list) == 2, 'Expecting initial and repeat results.' |
1662 self.first_paint_time = 1000.0 * first_paint_seconds | 1636 self._first_paint_time = 1000.0 * first_paint_seconds |
1663 self.initial_frame_times = FrameTimes(frame_times_lists[0]) | 1637 self._results_list = results_list |
1664 self.repeat_frame_times = FrameTimes(frame_times_lists[1]) | 1638 |
1639 def GetFirstPaintTime(self): | |
1640 return self._first_paint_time | |
1641 | |
1642 def GetFrameCount(self, index): | |
1643 results = self._results_list[index] | |
1644 return results.get('numFramesSentToScreen', results['numAnimationFrames']) | |
1645 | |
1646 def GetFps(self, index): | |
1647 return (self.GetFrameCount(index) / | |
1648 self._results_list[index]['totalTimeInSeconds']) | |
1649 | |
1650 def GetMeanFrameTime(self, index): | |
1651 return (1000.0 * self._results_list[index]['totalTimeInSeconds'] / |
1652 self.GetFrameCount(index)) | |
1653 | |
1654 def GetPercentBelow60Fps(self, index): | |
1655 return (100. * self._results_list[index]['droppedFrameCount'] / |
1656 self.GetFrameCount(index)) | |
1665 | 1657 |
1666 | 1658 |
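Note: for orientation, a hedged usage sketch of the new ScrollResults container, assuming the class definition above; the payload shape mirrors what __scrollTest is expected to send back, and every field value below is invented:

    # Hypothetical payload: one result dict for the initial scroll and one
    # for the repeat scroll.
    results_list = [
        {'numFramesSentToScreen': 580, 'totalTimeInSeconds': 10.0,
         'droppedFrameCount': 12},
        {'numFramesSentToScreen': 610, 'totalTimeInSeconds': 10.0,
         'droppedFrameCount': 3},
    ]
    scroll_results = ScrollResults(0.25, results_list)  # First paint: 0.25 s.
    scroll_results.GetFirstPaintTime()  # 250.0 milliseconds.
    scroll_results.GetFps(0)            # 58.0 FPS for the initial scroll.
    scroll_results.GetFps(1)            # 61.0 FPS for the repeat scroll.
    # GetFrameCount() falls back to 'numAnimationFrames' when
    # 'numFramesSentToScreen' is absent from a result dict.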
1667 class BaseScrollTest(BasePerfTest): | 1659 class BaseScrollTest(BasePerfTest): |
1668 """Base class for tests measuring scrolling performance.""" | 1660 """Base class for tests measuring scrolling performance.""" |
1669 | 1661 |
1670 def setUp(self): | 1662 def setUp(self): |
1671 """Performs necessary setup work before running each test.""" | 1663 """Performs necessary setup work before running each test.""" |
1672 super(BaseScrollTest, self).setUp() | 1664 super(BaseScrollTest, self).setUp() |
1673 scroll_file = os.path.join(self.DataDir(), 'scroll', 'scroll.js') | 1665 scroll_file = os.path.join(self.DataDir(), 'scroll', 'scroll.js') |
1674 with open(scroll_file) as f: | 1666 with open(scroll_file) as f: |
1675 self._scroll_text = f.read() | 1667 self._scroll_text = f.read() |
1676 | 1668 |
1677 def RunSingleInvocation(self, url, setup_js=''): | 1669 def ExtraChromeFlags(self): |
1670 """Ensures Chrome is launched with custom flags. | |
1671 | |
1672 Returns: | |
1673 A list of extra flags to pass to Chrome when it is launched. | |
1674 """ | |
1675 # Extra flag used by scroll performance tests. | |
1676 return (super(BaseScrollTest, self).ExtraChromeFlags() + | |
1677 ['--enable-gpu-benchmarking']) | |
1678 | |
1679 def RunSingleInvocation(self, url, is_gmail_test=False): | |
1678 """Runs a single invocation of the scroll test. | 1680 """Runs a single invocation of the scroll test. |
1679 | 1681 |
1680 Args: | 1682 Args: |
1681 url: The string url for the webpage on which to run the scroll test. | 1683 url: The string url for the webpage on which to run the scroll test. |
1682 setup_js: String representing additional Javascript setup code to execute | 1684 is_gmail_test: True iff the test is a GMail test. |
1683 in the webpage immediately before running the scroll test. | |
1684 | 1685 |
1685 Returns: | 1686 Returns: |
1686 Instance of ScrollResults. | 1687 Instance of ScrollResults. |
1687 """ | 1688 """ |
1689 | |
1688 self.assertTrue(self.AppendTab(pyauto.GURL(url)), | 1690 self.assertTrue(self.AppendTab(pyauto.GURL(url)), |
1689 msg='Failed to append tab for webpage.') | 1691 msg='Failed to append tab for webpage.') |
1690 | 1692 |
1691 js = """ | 1693 timeout = pyauto.PyUITest.ActionTimeoutChanger(self, 300 * 1000) # ms |
1692 %s | 1694 test_js = """%s; |
1693 %s | 1695 var __stringify = JSON.stringify || JSON.encode; |
1694 __scroll_test(); | 1696 __scrollTest(function(results) { |
1695 window.domAutomationController.send('done'); | 1697 window.domAutomationController.send(__stringify(results)); |
1696 """ % (self._scroll_text, setup_js) | 1698 }, %s); |
1697 self.ExecuteJavascript(js, tab_index=1) | 1699 """ % (self._scroll_text, 'true' if is_gmail_test else 'false') |
1700 results = simplejson.loads(self.ExecuteJavascript(test_js, tab_index=1)) | |
1698 | 1701 |
1699 # Poll the webpage until the test is complete. | 1702 first_paint_js = ('window.domAutomationController.send(' |
1700 def IsTestComplete(): | 1703 '(chrome.loadTimes().firstPaintTime - ' |
1701 done_js = """ | 1704 'chrome.loadTimes().requestTime).toString());') |
1702 if (__scrolling_complete) | 1705 first_paint_time = float(self.ExecuteJavascript(first_paint_js, |
1703 window.domAutomationController.send('complete'); | 1706 tab_index=1)) |
1704 else | |
1705 window.domAutomationController.send('incomplete'); | |
1706 """ | |
1707 return self.ExecuteJavascript(done_js, tab_index=1) == 'complete' | |
1708 | 1707 |
1709 self.assertTrue( | 1708 self.CloseTab(tab_index=1) |
1710 self.WaitUntil(IsTestComplete, timeout=300, expect_retval=True, | |
1711 retry_sleep=1), | |
1712 msg='Timed out when waiting for scrolling tests to complete.') | |
1713 | 1709 |
1714 # Get the scroll test results from the webpage. | 1710 return ScrollResults(first_paint_time, results) |
1715 results_js = """ | |
1716 var __stringify = JSON.stringify || JSON.encode; | |
1717 window.domAutomationController.send(__stringify({ | |
1718 'first_paint_time': chrome.loadTimes().firstPaintTime - | |
1719 chrome.loadTimes().requestTime, | |
1720 'frame_times': __frame_times, | |
1721 })); | |
1722 """ | |
1723 results = eval(self.ExecuteJavascript(results_js, tab_index=1)) | |
1724 self.CloseTab(tab_index=1) | |
1725 return ScrollResults(results['first_paint_time'], results['frame_times']) | |
1726 | 1711 |
1727 def RunScrollTest(self, url, description, graph_name, setup_js=''): | 1712 def RunScrollTest(self, url, description, graph_name, is_gmail_test=False): |
1728 """Runs a scroll performance test on the specified webpage. | 1713 """Runs a scroll performance test on the specified webpage. |
1729 | 1714 |
1730 Args: | 1715 Args: |
1731 url: The string url for the webpage on which to run the scroll test. | 1716 url: The string url for the webpage on which to run the scroll test. |
1732 description: A string description for the particular test being run. | 1717 description: A string description for the particular test being run. |
1733 graph_name: A string name for the performance graph associated with this | 1718 graph_name: A string name for the performance graph associated with this |
1734 test. Only used on Chrome desktop. | 1719 test. Only used on Chrome desktop. |
1735 setup_js: String representing additional Javascript setup code to execute | 1720 is_gmail_test: True iff the test is a GMail test. |
1736 in the webpage immediately before running the scroll test. | |
1737 """ | 1721 """ |
1738 results = [] | 1722 results = [] |
1739 for iteration in range(self._num_iterations + 1): | 1723 for iteration in range(self._num_iterations + 1): |
1740 result = self.RunSingleInvocation(url, setup_js) | 1724 result = self.RunSingleInvocation(url, is_gmail_test) |
1741 # Ignore the first iteration. | 1725 # Ignore the first iteration. |
1742 if iteration: | 1726 if iteration: |
1743 fps = result.repeat_frame_times.GetFps() | 1727 fps = result.GetFps(1) |
1744 assert fps, '%s did not scroll' % url | 1728 assert fps, '%s did not scroll' % url |
1745 logging.info('Iteration %d of %d: %f fps', iteration, | 1729 logging.info('Iteration %d of %d: %f fps', iteration + 1, |
1746 self._num_iterations, fps) | 1730 self._num_iterations, fps) |
1747 results.append(result) | 1731 results.append(result) |
1748 self._PrintSummaryResults( | 1732 self._PrintSummaryResults( |
1749 description, [r.repeat_frame_times.GetFps() for r in results], | 1733 description, [r.GetFps(1) for r in results], |
1750 'FPS', graph_name) | 1734 'FPS', graph_name) |
1751 | 1735 |
1752 | 1736 |
1753 class PopularSitesScrollTest(BaseScrollTest): | 1737 class PopularSitesScrollTest(BaseScrollTest): |
1754 """Measures scrolling performance on recorded versions of popular sites.""" | 1738 """Measures scrolling performance on recorded versions of popular sites.""" |
1755 | 1739 |
1756 def ExtraChromeFlags(self): | 1740 def ExtraChromeFlags(self): |
1757 """Ensures Chrome is launched with custom flags. | 1741 """Ensures Chrome is launched with custom flags. |
1758 | 1742 |
1759 Returns: | 1743 Returns: |
1760 A list of extra flags to pass to Chrome when it is launched. | 1744 A list of extra flags to pass to Chrome when it is launched. |
1761 """ | 1745 """ |
1762 return super(PopularSitesScrollTest, | 1746 return super(PopularSitesScrollTest, |
1763 self).ExtraChromeFlags() + PageCyclerReplay.CHROME_FLAGS | 1747 self).ExtraChromeFlags() + PageCyclerReplay.CHROME_FLAGS |
1764 | 1748 |
1765 def _GetUrlList(self, test_name): | 1749 def _GetUrlList(self, test_name): |
1766 """Returns list of recorded sites.""" | 1750 """Returns list of recorded sites.""" |
1767 sites_path = PageCyclerReplay.Path('page_sets', test_name=test_name) | 1751 sites_path = PageCyclerReplay.Path('page_sets', test_name=test_name) |
1768 with open(sites_path) as f: | 1752 with open(sites_path) as f: |
1769 sites_text = f.read() | 1753 sites_text = f.read() |
1770 js = """ | 1754 js = """ |
1771 %s | 1755 %s |
1772 window.domAutomationController.send(JSON.stringify(pageSets)); | 1756 window.domAutomationController.send(JSON.stringify(pageSets)); |
1773 """ % sites_text | 1757 """ % sites_text |
1774 page_sets = eval(self.ExecuteJavascript(js)) | 1758 page_sets = eval(self.ExecuteJavascript(js)) |
1775 return list(itertools.chain(*page_sets))[1:] # Skip first. | 1759 return list(itertools.chain(*page_sets))[1:] # Skip first. |
1776 | 1760 |
1777 def _PrintScrollResults(self, results): | 1761 def _PrintScrollResults(self, results): |
1778 self._PrintSummaryResults( | 1762 self._PrintSummaryResults( |
1779 'initial', [r.initial_frame_times.GetMeanFrameTime() for r in results], | 1763 'initial', [r.GetMeanFrameTime(0) for r in results], |
1780 'ms', 'FrameTimes') | 1764 'ms', 'FrameTimes') |
1781 self._PrintSummaryResults( | 1765 self._PrintSummaryResults( |
1782 'repeat', [r.repeat_frame_times.GetMeanFrameTime() for r in results], | 1766 'repeat', [r.GetMeanFrameTime(1) for r in results], |
1783 'ms', 'FrameTimes') | 1767 'ms', 'FrameTimes') |
1784 self._PrintSummaryResults( | 1768 self._PrintSummaryResults( |
1785 'initial', | 1769 'initial', |
1786 [r.initial_frame_times.GetPercentBelow60Fps() for r in results], | 1770 [r.GetPercentBelow60Fps(0) for r in results], |
1787 'percent', 'PercentBelow60FPS') | 1771 'percent', 'PercentBelow60FPS') |
1788 self._PrintSummaryResults( | 1772 self._PrintSummaryResults( |
1789 'repeat', | 1773 'repeat', |
1790 [r.repeat_frame_times.GetPercentBelow60Fps() for r in results], | 1774 [r.GetPercentBelow60Fps(1) for r in results], |
1791 'percent', 'PercentBelow60FPS') | 1775 'percent', 'PercentBelow60FPS') |
1792 self._PrintSummaryResults( | 1776 self._PrintSummaryResults( |
1793 'first_paint_time', [r.first_paint_time for r in results], | 1777 'first_paint_time', [r.GetFirstPaintTime() for r in results], |
1794 'ms', 'FirstPaintTime') | 1778 'ms', 'FirstPaintTime') |
1795 | 1779 |
1796 def test2012Q3(self): | 1780 def test2012Q3(self): |
1797 test_name = '2012Q3' | 1781 test_name = '2012Q3' |
1798 urls = self._GetUrlList(test_name) | 1782 urls = self._GetUrlList(test_name) |
1799 results = [] | 1783 results = [] |
1800 with PageCyclerReplay.ReplayServer(test_name) as replay_server: | 1784 with PageCyclerReplay.ReplayServer(test_name) as replay_server: |
1801 if replay_server.is_record_mode: | 1785 if replay_server.is_record_mode: |
1802 self._num_iterations = 1 | 1786 self._num_iterations = 1 |
1803 for iteration in range(self._num_iterations): | 1787 for iteration in range(self._num_iterations): |
1804 for url in urls: | 1788 for url in urls: |
1805 result = self.RunSingleInvocation(url) | 1789 result = self.RunSingleInvocation(url) |
1806 fps = result.initial_frame_times.GetFps() | 1790 fps = result.GetFps(0) |
1807 assert fps, '%s did not scroll' % url | 1791 assert fps, '%s did not scroll' % url |
1808 logging.info('Iteration %d of %d: %f fps', iteration, | 1792 logging.info('Iteration %d of %d: %f fps', iteration + 1, |
1809 self._num_iterations, fps) | 1793 self._num_iterations, fps) |
1810 results.append(result) | 1794 results.append(result) |
1811 self._PrintScrollResults(results) | 1795 self._PrintScrollResults(results) |
1812 | 1796 |
1813 | 1797 |
1814 class ScrollTest(BaseScrollTest): | 1798 class ScrollTest(BaseScrollTest): |
1815 """Tests to measure scrolling performance.""" | 1799 """Tests to measure scrolling performance.""" |
1816 | 1800 |
1817 def ExtraChromeFlags(self): | 1801 def ExtraChromeFlags(self): |
1818 """Ensures Chrome is launched with custom flags. | 1802 """Ensures Chrome is launched with custom flags. |
(...skipping 18 matching lines...)
1837 | 1821 |
1838 def testGooglePlusScroll(self): | 1822 def testGooglePlusScroll(self): |
1839 """Runs the scroll test on a Google Plus anonymized page.""" | 1823 """Runs the scroll test on a Google Plus anonymized page.""" |
1840 self.RunScrollTest( | 1824 self.RunScrollTest( |
1841 self.GetFileURLForDataPath('scroll', 'plus.html'), | 1825 self.GetFileURLForDataPath('scroll', 'plus.html'), |
1842 'ScrollGooglePlusPage', 'scroll_fps') | 1826 'ScrollGooglePlusPage', 'scroll_fps') |
1843 | 1827 |
1844 def testGmailScroll(self): | 1828 def testGmailScroll(self): |
1845 """Runs the scroll test using the live Gmail site.""" | 1829 """Runs the scroll test using the live Gmail site.""" |
1846 self._LoginToGoogleAccount(account_key='test_google_account_gmail') | 1830 self._LoginToGoogleAccount(account_key='test_google_account_gmail') |
1847 self.RunScrollTest('http://www.gmail.com', 'ScrollGmail', 'scroll_fps', | 1831 self.RunScrollTest('http://www.gmail.com', 'ScrollGmail', |
1848 setup_js='__is_gmail_test = true;') | 1832 'scroll_fps', True) |
1849 | 1833 |
1850 | 1834 |
1851 class FlashTest(BasePerfTest): | 1835 class FlashTest(BasePerfTest): |
1852 """Tests to measure flash performance.""" | 1836 """Tests to measure flash performance.""" |
1853 | 1837 |
1854 def _RunFlashTestForAverageFPS(self, webpage_url, description, graph_name): | 1838 def _RunFlashTestForAverageFPS(self, webpage_url, description, graph_name): |
1855 """Runs a single flash test that measures an average FPS value. | 1839 """Runs a single flash test that measures an average FPS value. |
1856 | 1840 |
1857 Args: | 1841 Args: |
1858 webpage_url: The string URL to a webpage that will run the test. | 1842 webpage_url: The string URL to a webpage that will run the test. |
(...skipping 356 matching lines...)
2215 'start_page': 'src/tools/page_cycler/webpagereplay/start.html', | 2199 'start_page': 'src/tools/page_cycler/webpagereplay/start.html', |
2216 'extension': 'src/tools/page_cycler/webpagereplay/extension', | 2200 'extension': 'src/tools/page_cycler/webpagereplay/extension', |
2217 } | 2201 } |
2218 | 2202 |
2219 CHROME_FLAGS = webpagereplay.CHROME_FLAGS + [ | 2203 CHROME_FLAGS = webpagereplay.CHROME_FLAGS + [ |
2220 '--log-level=0', | 2204 '--log-level=0', |
2221 '--disable-background-networking', | 2205 '--disable-background-networking', |
2222 '--enable-experimental-extension-apis', | 2206 '--enable-experimental-extension-apis', |
2223 '--enable-logging', | 2207 '--enable-logging', |
2224 '--enable-benchmarking', | 2208 '--enable-benchmarking', |
2209 '--enable-stats-table', | |
dennis_jeffrey 2012/08/16 17:50:59:
This was removed here: https://chromiumcodereview
dtu 2012/08/16 19:12:39:
Oh, interesting. The reason is, when you have --e
slamm_google 2012/08/16 19:33:52:
I was mistaken about --enable-benchmarking not bei
dennis_jeffrey 2012/08/16 21:00:36:
So if we remove --enable-stats-table, then we'll h
2225 '--metrics-recording-only', | 2210 '--metrics-recording-only', |
2226 '--activate-on-launch', | 2211 '--activate-on-launch', |
2227 '--no-first-run', | 2212 '--no-first-run', |
2228 '--no-proxy-server', | 2213 '--no-proxy-server', |
2229 ] | 2214 ] |
2230 | 2215 |
2231 @classmethod | 2216 @classmethod |
2232 def Path(cls, key, **kwargs): | 2217 def Path(cls, key, **kwargs): |
2233 return FormatChromePath(cls._PATHS[key], **kwargs) | 2218 return FormatChromePath(cls._PATHS[key], **kwargs) |
2234 | 2219 |
(...skipping 376 matching lines...)
2611 """Identifies the port number to which the server is currently bound. | 2596 """Identifies the port number to which the server is currently bound. |
2612 | 2597 |
2613 Returns: | 2598 Returns: |
2614 The numeric port number to which the server is currently bound. | 2599 The numeric port number to which the server is currently bound. |
2615 """ | 2600 """ |
2616 return self._server.server_address[1] | 2601 return self._server.server_address[1] |
2617 | 2602 |
2618 | 2603 |
2619 if __name__ == '__main__': | 2604 if __name__ == '__main__': |
2620 pyauto_functional.Main() | 2605 pyauto_functional.Main() |