Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(372)

Side by Side Diff: chrome/test/functional/perf.py

Issue 10411011: Add Web Page Replay enabled page cycler tests to pyauto. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Review fixes & first working version. Created 8 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """Basic pyauto performance tests. 6 """Basic pyauto performance tests.
7 7
8 For tests that need to be run for multiple iterations (e.g., so that average 8 For tests that need to be run for multiple iterations (e.g., so that average
9 and standard deviation values can be reported), the default number of iterations 9 and standard deviation values can be reported), the default number of iterations
10 run for each of these tests is specified by |_DEFAULT_NUM_ITERATIONS|. 10 run for each of these tests is specified by |_DEFAULT_NUM_ITERATIONS|.
(...skipping 30 matching lines...) Expand all
41 import urllib2 41 import urllib2
42 import urlparse 42 import urlparse
43 43
44 import pyauto_functional # Must be imported before pyauto. 44 import pyauto_functional # Must be imported before pyauto.
45 import pyauto 45 import pyauto
46 import simplejson # Must be imported after pyauto; located in third_party. 46 import simplejson # Must be imported after pyauto; located in third_party.
47 47
48 from netflix import NetflixTestHelper 48 from netflix import NetflixTestHelper
49 import pyauto_utils 49 import pyauto_utils
50 import test_utils 50 import test_utils
51 import webpagereplay
51 from youtube import YoutubeTestHelper 52 from youtube import YoutubeTestHelper
52 53
53 54
def Mean(values):
  """Return the arithmetic mean of |values|.

  Args:
    values: a sequence of numbers.

  Returns:
    The arithmetic mean as a float, or None if |values| is empty or
    contains a None entry (no meaningful mean can be computed).
  """
  # Guard against a 0-length sequence (division by zero) and None
  # entries, as requested in review.
  if not values or None in values:
    return None
  return sum(values) / float(len(values))
59
def GeometricMean(values):
  """Return the geometric mean of |values|.

  Returns:
    None for an empty sequence or when any value is negative,
    0.0 when any value is zero (the product would be zero), and
    otherwise exp(mean(log(x))) over the values.
  """
  if not values:
    return None
  if min(values) < 0.0:
    return None
  if 0.0 in values:
    return 0.0
  log_sum = sum(math.log(value) for value in values)
  return math.exp(log_sum / float(len(values)))
68
54 class BasePerfTest(pyauto.PyUITest): 69 class BasePerfTest(pyauto.PyUITest):
55 """Base class for performance tests.""" 70 """Base class for performance tests."""
56 71
57 _DEFAULT_NUM_ITERATIONS = 10 # Keep synced with desktopui_PyAutoPerfTests.py. 72 _DEFAULT_NUM_ITERATIONS = 10 # Keep synced with desktopui_PyAutoPerfTests.py.
58 _DEFAULT_MAX_TIMEOUT_COUNT = 10 73 _DEFAULT_MAX_TIMEOUT_COUNT = 10
59 _PERF_OUTPUT_MARKER_PRE = '_PERF_PRE_' 74 _PERF_OUTPUT_MARKER_PRE = '_PERF_PRE_'
60 _PERF_OUTPUT_MARKER_POST = '_PERF_POST_' 75 _PERF_OUTPUT_MARKER_POST = '_PERF_POST_'
61 76
62 def setUp(self): 77 def setUp(self):
63 """Performs necessary setup work before running each test.""" 78 """Performs necessary setup work before running each test."""
(...skipping 1662 matching lines...) Expand 10 before | Expand all | Expand 10 after
1726 logging.info('Total v8 heap size: %f MB', v8_heap_size) 1741 logging.info('Total v8 heap size: %f MB', v8_heap_size)
1727 self._OutputPerfGraphValue(description + 'V8HeapSize', v8_heap_size, 'MB', 1742 self._OutputPerfGraphValue(description + 'V8HeapSize', v8_heap_size, 'MB',
1728 graph_name + '_v8_heap_size') 1743 graph_name + '_v8_heap_size')
1729 1744
def testAngryBirds(self):
  """Measures performance for Angry Birds."""
  url = 'http://chrome.angrybirds.com'
  self._RunLiveGamePerfTest(url, 'Angry Birds', 'AngryBirds', 'angry_birds')
1735 1750
1736 class PageCyclerTest(BasePerfTest): 1751 class BasePageCyclerTest(BasePerfTest):
1737 """Tests to run various page cyclers.""" 1752 """Page class for page cycler tests.
1753
1754 Derived classes must implement StartUrl().
1755 """
1756 TRIM_PERCENT = 20
1738 1757
1739 # Page Cycler lives in src/data/page_cycler rather than src/chrome/test/data 1758 # Page Cycler lives in src/data/page_cycler rather than src/chrome/test/data
1740 PC_PATH = os.path.join(BasePerfTest.DataDir(), os.pardir, os.pardir, 1759 DATA_PATH = os.path.join(BasePerfTest.DataDir(), os.pardir, os.pardir,
1741 os.pardir, 'data', 'page_cycler') 1760 os.pardir, 'data', 'page_cycler')
1761
1762 @classmethod
1763 def DataPath(cls, subdir):
1764 return os.path.join(cls.DATA_PATH, subdir)
1742 1765
1743 def ExtraChromeFlags(self): 1766 def ExtraChromeFlags(self):
1744 """Ensures Chrome is launched with custom flags. 1767 """Ensures Chrome is launched with custom flags.
1745 1768
1746 Returns: 1769 Returns:
1747 A list of extra flags to pass to Chrome when it is launched. 1770 A list of extra flags to pass to Chrome when it is launched.
1748 """ 1771 """
1749 # Extra flags required to run these tests. 1772 # Extra flags required to run these tests.
1750 # The first two are needed for the test. 1773 # The first two are needed for the test.
1751 # The plugins argument is to prevent bad scores due to pop-ups from 1774 # The plugins argument is to prevent bad scores due to pop-ups from
1752 # running an old version of something (like Flash). 1775 # running an old version of something (like Flash).
1753 return (super(PageCyclerTest, self).ExtraChromeFlags() + 1776 return (super(BasePageCyclerTest, self).ExtraChromeFlags() +
1754 ['--js-flags="--expose_gc"', 1777 ['--js-flags="--expose_gc"',
1755 '--enable-file-cookies', 1778 '--enable-file-cookies',
1756 '--allow-outdated-plugins']) 1779 '--allow-outdated-plugins'])
1757 1780
def WaitUntilDone(self, url, iterations):
  """Block until the page cycler signals completion via its cookie.

  The page cycler page sets "__pc_done=1" in its cookies once the run
  has finished; poll for that with a timeout proportional to the
  iteration count.
  """
  def _TestCompleted():
    return '__pc_done=1' in self.GetCookie(pyauto.GURL(url))  # window 0, tab 0
  completed = self.WaitUntil(
      _TestCompleted, timeout=60 * iterations, retry_sleep=1)
  self.assertTrue(
      completed,
      msg='Timed out waiting for page cycler test to complete.')
def CollectPagesAndTimes(self, url):
  """Collect the page cycler results from the cookies.

  Returns:
    A (pages, times) pair: the list of page names and the flat list
    of load times (floats, milliseconds) parsed from the cookies.
  """
  pages, times = None, None
  cookies = self.GetCookie(pyauto.GURL(url))  # window 0, tab 0
  for cookie in cookies.split(';'):
    if '__pc_pages' in cookie:
      pages = cookie.split('=', 1)[1].split(',')
    elif '__pc_timings' in cookie:
      times = [float(t) for t in cookie.split('=', 1)[1].split(',')]
  self.assertTrue(pages and times,
                  msg='Unable to find test results in cookies: %s' % cookies)
  return pages, times
1804
def IteratePageTimes(self, times, num_pages, iterations):
  """Regroup the flat |times| list into one list of times per page.

  Args:
    times: e.g. [page1_iter1, page1_iter2, ..., page2_iter1, page2_iter2, ...]
    num_pages: the number of pages
    iterations: the number of times for each page
  Yields:
    times for one page: [page1_iter1, page1_iter2, ...]
  """
  # Sanity-check that the flat list factors exactly into pages x iterations.
  expected_num_times = num_pages * iterations
  self.assertEqual(
      expected_num_times, len(times),
      msg=('len(times) != num_pages * iterations: %s != %s * %s, times=%s' %
           (len(times), num_pages, iterations, times)))
  time_iter = iter(times)
  for _ in range(num_pages):
    yield [next(time_iter) for _ in range(iterations)]
1824
def TrimTimes(self, times, percent):
  """Return a new, sorted list with about |percent| percent of times trimmed.

  Removes the largest and smallest values: half of the trimmed count from
  each end, with the odd leftover taken from the top.

  Args:
    times: a list of load times for one page.
    percent: percentage (0-100) of the values to drop.

  Returns:
    A sorted list of the remaining times.
  """
  iterations = len(times)
  times = sorted(times)
  num_to_trim = int(iterations * float(percent) / 100.0)
  # Lazy %-args (consistent with the call below) instead of eager formatting.
  logging.debug('Before trimming %d: %s', num_to_trim, times)
  # Floor division: identical to '/' for ints under Python 2, and still a
  # valid slice index under Python 3 (where int / int yields a float).
  a = num_to_trim // 2
  b = iterations - (num_to_trim // 2 + num_to_trim % 2)
  trimmed_times = times[a:b]
  logging.debug('After trimming: %s', trimmed_times)
  return trimmed_times
1839
def ComputeFinalResult(self, times, num_pages, iterations):
  """Compute the final score from the flat list of page load times.

  The final score is a geometric mean of the arithmetic means of each
  page's load time; the upper/lower 20% of the times for each page are
  dropped first so they don't skew the mean. The geometric mean is used
  because the time range for any given site may be very different, and
  we don't want slower sites to weight more heavily than others.
  """
  page_means = []
  for page_times in self.IteratePageTimes(times, num_pages, iterations):
    trimmed = self.TrimTimes(page_times, percent=self.TRIM_PERCENT)
    page_means.append(Mean(trimmed))
  return GeometricMean(page_means)
1852
def StartUrl(self, test_name, iterations):
  """Return the URL used to start the test.

  Derived classes must implement this.

  Args:
    test_name: the page cycler test name.
    iterations: the number of times to cycle through each page.

  Raises:
    NotImplementedError: always; subclasses must override.
  """
  # Raise the exception class NotImplementedError; raising the
  # NotImplemented singleton is incorrect (and a TypeError in Python 3).
  raise NotImplementedError
1859
def RunPageCyclerTest(self, name, description):
  """Runs the specified PageCycler test.

  Navigates to the test's start URL, waits for the cycler to finish,
  then computes and reports the final (geometric-mean) score.

  Args:
    name: the page cycler test name (corresponds to a directory or test file)
    description: a string description for the test
  """
  iterations = self._num_iterations
  start_url = self.StartUrl(name, iterations)
  self.NavigateToURL(start_url)
  self.WaitUntilDone(start_url, iterations)
  pages, times = self.CollectPagesAndTimes(start_url)
  final_result = self.ComputeFinalResult(times, len(pages), iterations)
  logging.info('%s page cycler final result: %f' %
               (description, final_result))
  self._OutputPerfGraphValue(description + '_PageCycler', final_result,
                             'milliseconds', graph_name='PageCycler')
1877
1878
class PageCyclerTest(BasePageCyclerTest):
  """Tests to run various page cyclers."""

  def _PreReadDataDir(self, subdir):
    """Recursively read every file under a page cycler data subdirectory.

    The intent is to get the files into memory (the OS file cache) before
    they are used by the benchmark, so cold disk reads don't skew timings.

    Args:
      subdir: a subdirectory of the page cycler data directory.
    """
    for dirname, _, filenames in os.walk(self.DataPath(subdir)):
      for filename in filenames:
        with open(os.path.join(dirname, filename)) as fp:
          fp.read()
1770 1896
def StartUrl(self, test_name, iterations):
  """Return the file: URL that starts the named page cycler test."""
  query = 'start.html?auto=1&iterations=%d' % iterations
  return self.GetFileURLForDataPath(self.DataPath(test_name), query)
def RunPageCyclerTest(self, dirname, description):
  """Runs the specified PageCycler test after warming the file cache.

  Args:
    dirname: directory containing the page cycler test
    description: a string description for the test
  """
  # Pre-read the shared resources and the test's own pages first.
  for subdir in ('common', dirname):
    self._PreReadDataDir(subdir)
  super(PageCyclerTest, self).RunPageCyclerTest(dirname, description)
1785 Args:
1786 dirname: The directory containing the page cycler test.
1787 iterations: How many times to run through the set of pages.
1788 description: A string description for the particular test being run.
1789 """
1790 self._PreReadDir(os.path.join(self.PC_PATH, dirname))
1791
1792 url = self.GetFileURLForDataPath(os.path.join(self.PC_PATH, dirname),
1793 'start.html')
1794
1795 self.NavigateToURL('%s?auto=1&iterations=%d' % (url, iterations))
1796
1797 # Check cookies for "__pc_done=1" to know the test is over.
1798 def IsTestDone():
1799 cookies = self.GetCookie(pyauto.GURL(url)) # Window 0, tab 0.
1800 return '__pc_done=1' in cookies
1801
1802 self.assertTrue(
1803 self.WaitUntil(IsTestDone, timeout=(60 * iterations), retry_sleep=1),
1804 msg='Timed out waiting for page cycler test to complete.')
1805
1806 # Collect the results from the cookies.
1807 site_to_time_list = {}
1808 cookies = self.GetCookie(pyauto.GURL(url)) # Window 0, tab 0.
1809 site_list = ''
1810 time_list = ''
1811 for cookie in cookies.split(';'):
1812 if '__pc_pages' in cookie:
1813 site_list = cookie[cookie.find('=') + 1:]
1814 elif '__pc_timings' in cookie:
1815 time_list = cookie[cookie.find('=') + 1:]
1816 self.assertTrue(site_list and time_list,
1817 msg='Could not find test results in cookies: %s' % cookies)
1818 site_list = site_list.split(',')
1819 time_list = time_list.split(',')
1820 self.assertEqual(iterations, len(time_list) / len(site_list),
1821 msg='Iteration count %d does not match with site/timing '
1822 'lists: %s and %s' % (iterations, site_list, time_list))
1823 for site_index, site in enumerate(site_list):
1824 site_to_time_list[site] = []
1825 for iteration_index in xrange(iterations):
1826 site_to_time_list[site].append(
1827 float(time_list[iteration_index * len(site_list) + site_index]))
1828
1829 site_times = []
1830 for site, time_list in site_to_time_list.iteritems():
1831 sorted_times = sorted(time_list)
1832 num_to_drop = int(len(sorted_times) * 0.2)
1833 logging.debug('Before dropping %d: ' % num_to_drop)
1834 logging.debug(sorted_times)
1835 if num_to_drop:
1836 sorted_times = sorted_times[:-num_to_drop]
1837 logging.debug('After dropping:')
1838 logging.debug(sorted_times)
1839 # Do an arithmetic mean of the load times for a given page.
1840 mean_time = sum(sorted_times) / len(sorted_times)
1841 logging.debug('Mean time is: ' + str(mean_time))
1842 site_times.append(mean_time)
1843
1844 logging.info('site times = %s' % site_times)
1845 # Compute a geometric mean over the averages for each site.
1846 final_result = reduce(lambda x, y: x * y,
1847 site_times) ** (1.0/ len(site_times))
1848 logging.info('%s page cycler final result: %f' %
1849 (description, final_result))
1850 self._OutputPerfGraphValue(description + '_PageCycler', final_result,
1851 'milliseconds', graph_name='PageCycler')
1852 1912
def testMoreJSFile(self):
  """Page cycler over the 'morejs' local page set."""
  self.RunPageCyclerTest('morejs', 'MoreJSFile')

def testAlexaFile(self):
  """Page cycler over the 'alexa_us' local page set."""
  self.RunPageCyclerTest('alexa_us', 'Alexa_usFile')

def testBloatFile(self):
  """Page cycler over the 'bloat' local page set."""
  self.RunPageCyclerTest('bloat', 'BloatFile')

def testDHTMLFile(self):
  """Page cycler over the 'dhtml' local page set."""
  self.RunPageCyclerTest('dhtml', 'DhtmlFile')

def testIntl1File(self):
  """Page cycler over the 'intl1' local page set."""
  self.RunPageCyclerTest('intl1', 'Intl1File')

def testIntl2File(self):
  """Page cycler over the 'intl2' local page set."""
  self.RunPageCyclerTest('intl2', 'Intl2File')

def testMozFile(self):
  """Page cycler over the 'moz' local page set."""
  self.RunPageCyclerTest('moz', 'MozFile')

def testMoz2File(self):
  """Page cycler over the 'moz2' local page set."""
  self.RunPageCyclerTest('moz2', 'Moz2File')
1936
1937
class WebPageReplayPageCyclerTest(BasePageCyclerTest):
  """Tests to run Web Page Replay backed page cycler tests.

  Web Page Replay is a proxy that can record and "replay" web pages with
  simulated network characteristics -- without having to edit the pages
  by hand. With WPR, tests can use "real" web content, and catch
  performance issues that may result from introducing network delays and
  bandwidth throttling.
  """
  # Paths (relative to the Chromium checkout root) used by these tests.
  _PATHS = {
      'archives': 'src/data/page_cycler/webpagereplay',
      'wpr': 'src/data/page_cycler/webpagereplay/{test_name}.wpr',
      'wpr_pub': 'src/tools/page_cycler/webpagereplay/tests/{test_name}.wpr',
      'start_page': 'src/tools/page_cycler/webpagereplay/start.html',
      'extension': 'src/tools/page_cycler/webpagereplay/extension',
      'replay': 'src/third_party/webpagereplay',
      'logs': 'src/webpagereplay_logs',
      }

  # Absolute path of the directory containing the 'src' checkout.
  _BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                           '..', '..', '..', '..'))
  _IS_DNS_FORWARDED = False

  @classmethod
  def _Path(cls, key, **kwargs):
    """Provide paths for page cycler tests with Web Page Replay.

    Args:
      key: one of the _PATHS keys.
      **kwargs: format() substitutions for the path template (test_name).
    """
    chromium_path = cls._PATHS[key].format(**kwargs)
    return os.path.join(cls._BASE_DIR, *chromium_path.split('/'))

  @classmethod
  def _ArchivePath(cls, test_name):
    """Return the .wpr archive path, preferring private archives if present."""
    has_private_archives = os.path.exists(cls._Path('archives'))
    key = 'wpr' if has_private_archives else 'wpr_pub'
    return cls._Path(key, test_name=test_name)

  def ExtraChromeFlags(self):
    """Ensures Chrome is launched with custom flags.

    Returns:
      A list of extra flags to pass to Chrome when it is launched.
    """
    flags = super(WebPageReplayPageCyclerTest, self).ExtraChromeFlags()
    flags.append('--load-extension=%s' % self._Path('extension'))
    if not self._IS_DNS_FORWARDED:
      # Without DNS forwarding, send every host to the replay proxy.
      flags.append('--host-resolver-rules=MAP * %s' % webpagereplay.REPLAY_HOST)
    flags.extend([
        '--testing-fixed-http-port=%s' % webpagereplay.HTTP_PORT,
        '--testing-fixed-https-port=%s' % webpagereplay.HTTPS_PORT,
        '--log-level=0',
        ])
    extra_flags = [
        '--disable-background-networking',
        '--enable-experimental-extension-apis',
        '--enable-logging',
        '--enable-stats-table',
        '--enable-benchmarking',
        '--ignore-certificate-errors',
        '--metrics-recording-only',
        '--activate-on-launch',
        '--no-first-run',
        '--no-proxy-server',
        ]
    # Avoid duplicating flags the superclass may already have added.
    flags.extend(f for f in extra_flags if f not in flags)
    return flags

  def StartUrl(self, test_name, iterations):
    """Return the start URL for a Web Page Replay page cycler run.

    Args:
      test_name: name of the test (selects the page set).
      iterations: number of cycles, or None to omit the parameter.
    """
    start_url = 'file://%s?test=%s' % (self._Path('start_page'), test_name)
    if iterations is not None:
      start_url += '&iterations=%d' % iterations
    # Per review: set PC_NO_AUTO in the environment to start the cycler
    # manually (e.g. for debugging a single run).
    use_auto = 'PC_NO_AUTO' not in os.environ
    if use_auto:
      start_url += '&auto=1'
    return start_url

  def RunPageCyclerTest(self, test_name, description):
    """Runs the specified PageCycler test under a Web Page Replay server.

    Args:
      test_name: name for archive (.wpr) and config (.js) files.
      description: a string description for the test
    """
    replay_options = []
    if not self._IS_DNS_FORWARDED:
      replay_options.append('--no-dns_forwarding')
    # Per review: set PC_RECORD in the environment to record a new
    # archive instead of replaying an existing one.
    is_record_mode = 'PC_RECORD' in os.environ
    if is_record_mode:
      replay_options.append('--record')
    with webpagereplay.ReplayServer(
        self._Path('replay'),
        self._ArchivePath(test_name),
        self._Path('logs'),
        replay_options):
      super(WebPageReplayPageCyclerTest, self).RunPageCyclerTest(
          test_name, description)

  def test2012Q2(self):
    """Page cycler over the 2012Q2 recorded page set."""
    self.RunPageCyclerTest('2012Q2', '2012Q2')
1876 2035
1877 2036
1878 class MemoryTest(BasePerfTest): 2037 class MemoryTest(BasePerfTest):
1879 """Tests to measure memory consumption under different usage scenarios.""" 2038 """Tests to measure memory consumption under different usage scenarios."""
1880 2039
1881 def ExtraChromeFlags(self): 2040 def ExtraChromeFlags(self):
1882 """Launches Chrome with custom flags. 2041 """Launches Chrome with custom flags.
1883 2042
1884 Returns: 2043 Returns:
1885 A list of extra flags to pass to Chrome when it is launched. 2044 A list of extra flags to pass to Chrome when it is launched.
(...skipping 322 matching lines...) Expand 10 before | Expand all | Expand 10 after
2208 """Identifies the port number to which the server is currently bound. 2367 """Identifies the port number to which the server is currently bound.
2209 2368
2210 Returns: 2369 Returns:
2211 The numeric port number to which the server is currently bound. 2370 The numeric port number to which the server is currently bound.
2212 """ 2371 """
2213 return self._server.server_address[1] 2372 return self._server.server_address[1]
2214 2373
2215 2374
# Script entry point: run under the pyauto functional-test harness.
if __name__ == '__main__':
  pyauto_functional.Main()
OLDNEW
« no previous file with comments | « no previous file | chrome/test/functional/webpagereplay.py » ('j') | chrome/test/functional/webpagereplay.py » ('J')

Powered by Google App Engine
This is Rietveld 408576698