Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(568)

Side by Side Diff: chrome/test/functional/perf.py

Issue 10411011: Add Web Page Replay enabled page cycler tests to pyauto. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Allow up to 3 minutes per WPR iteration. Add options for alternate WPR code. Created 8 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « chrome/test/functional/PYAUTO_TESTS ('k') | chrome/test/functional/webpagereplay.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """Basic pyauto performance tests. 6 """Basic pyauto performance tests.
7 7
8 For tests that need to be run for multiple iterations (e.g., so that average 8 For tests that need to be run for multiple iterations (e.g., so that average
9 and standard deviation values can be reported), the default number of iterations 9 and standard deviation values can be reported), the default number of iterations
10 run for each of these tests is specified by |_DEFAULT_NUM_ITERATIONS|. 10 run for each of these tests is specified by |_DEFAULT_NUM_ITERATIONS|.
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
43 import urllib2 43 import urllib2
44 import urlparse 44 import urlparse
45 45
46 import pyauto_functional # Must be imported before pyauto. 46 import pyauto_functional # Must be imported before pyauto.
47 import pyauto 47 import pyauto
48 import simplejson # Must be imported after pyauto; located in third_party. 48 import simplejson # Must be imported after pyauto; located in third_party.
49 49
50 from netflix import NetflixTestHelper 50 from netflix import NetflixTestHelper
51 import pyauto_utils 51 import pyauto_utils
52 import test_utils 52 import test_utils
53 import webpagereplay
53 from youtube import YoutubeTestHelper 54 from youtube import YoutubeTestHelper
54 55
55 56
def Mean(values):
  """Return the arithmetic mean of |values|, or None if undefined.

  The mean is undefined for an empty sequence or when any entry is None.
  """
  if not values or None in values:
    return None
  total = 0.0
  for value in values:
    total += value
  return total / len(values)
62
63
64 def GeometricMean(values):
65 """Return the geometric mean of |values|."""
66 if not values or None in values or [x for x in values if x < 0.0]:
67 return None
68 if 0.0 in values:
69 return 0.0
70 return math.exp(Mean([math.log(x) for x in values]))
71
72
56 class BasePerfTest(pyauto.PyUITest): 73 class BasePerfTest(pyauto.PyUITest):
57 """Base class for performance tests.""" 74 """Base class for performance tests."""
58 75
59 _DEFAULT_NUM_ITERATIONS = 10 # Keep synced with desktopui_PyAutoPerfTests.py. 76 _DEFAULT_NUM_ITERATIONS = 10 # Keep synced with desktopui_PyAutoPerfTests.py.
60 _DEFAULT_MAX_TIMEOUT_COUNT = 10 77 _DEFAULT_MAX_TIMEOUT_COUNT = 10
61 _PERF_OUTPUT_MARKER_PRE = '_PERF_PRE_' 78 _PERF_OUTPUT_MARKER_PRE = '_PERF_PRE_'
62 _PERF_OUTPUT_MARKER_POST = '_PERF_POST_' 79 _PERF_OUTPUT_MARKER_POST = '_PERF_POST_'
63 80
64 def setUp(self): 81 def setUp(self):
65 """Performs necessary setup work before running each test.""" 82 """Performs necessary setup work before running each test."""
(...skipping 1705 matching lines...) Expand 10 before | Expand all | Expand 10 after
1771 logging.info('Total v8 heap size: %f MB', v8_heap_size) 1788 logging.info('Total v8 heap size: %f MB', v8_heap_size)
1772 self._OutputPerfGraphValue(description + 'V8HeapSize', v8_heap_size, 'MB', 1789 self._OutputPerfGraphValue(description + 'V8HeapSize', v8_heap_size, 'MB',
1773 graph_name + '_v8_heap_size') 1790 graph_name + '_v8_heap_size')
1774 1791
1775 def testAngryBirds(self): 1792 def testAngryBirds(self):
1776 """Measures performance for Angry Birds.""" 1793 """Measures performance for Angry Birds."""
1777 self._RunLiveGamePerfTest('http://chrome.angrybirds.com', 'Angry Birds', 1794 self._RunLiveGamePerfTest('http://chrome.angrybirds.com', 'Angry Birds',
1778 'AngryBirds', 'angry_birds') 1795 'AngryBirds', 'angry_birds')
1779 1796
1780 1797
class BasePageCyclerTest(BasePerfTest):
  """Base class for page cycler tests.

  Setting 'PC_NO_AUTO=1' in the environment avoids automatically running
  through all the pages.

  Derived classes must implement StartUrl().
  """
  # Upper bound on the time budget allotted to a single iteration.
  MAX_ITERATION_SECONDS = 60
  # Percentage of each page's times trimmed before averaging.
  TRIM_PERCENT = 20
  DEFAULT_USE_AUTO = True

  # Page Cycler lives in src/data/page_cycler rather than src/chrome/test/data
  DATA_PATH = os.path.join(BasePerfTest.DataDir(), os.pardir, os.pardir,
                           os.pardir, 'data', 'page_cycler')
1813
1814 def setUp(self):
1815 """Performs necessary setup work before running each test."""
1816 super(BasePageCyclerTest, self).setUp()
1817 self.use_auto = 'PC_NO_AUTO' not in os.environ
1818
1819 @classmethod
1820 def DataPath(cls, subdir):
1821 return os.path.join(cls.DATA_PATH, subdir)
1787 1822
1788 def ExtraChromeFlags(self): 1823 def ExtraChromeFlags(self):
1789 """Ensures Chrome is launched with custom flags. 1824 """Ensures Chrome is launched with custom flags.
1790 1825
1791 Returns: 1826 Returns:
1792 A list of extra flags to pass to Chrome when it is launched. 1827 A list of extra flags to pass to Chrome when it is launched.
1793 """ 1828 """
1794 # Extra flags required to run these tests. 1829 # Extra flags required to run these tests.
1795 # The first two are needed for the test. 1830 # The first two are needed for the test.
1796 # The plugins argument is to prevent bad scores due to pop-ups from 1831 # The plugins argument is to prevent bad scores due to pop-ups from
1797 # running an old version of something (like Flash). 1832 # running an old version of something (like Flash).
1798 return (super(PageCyclerTest, self).ExtraChromeFlags() + 1833 return (super(BasePageCyclerTest, self).ExtraChromeFlags() +
1799 ['--js-flags="--expose_gc"', 1834 ['--js-flags="--expose_gc"',
1800 '--enable-file-cookies', 1835 '--enable-file-cookies',
1801 '--allow-outdated-plugins']) 1836 '--allow-outdated-plugins'])
1802 1837
1803 def _PreReadDir(self, dir): 1838 def WaitUntilDone(self, url, iterations):
1839 """Check cookies for "__pc_done=1" to know the test is over."""
1840 def IsDone():
1841 cookies = self.GetCookie(pyauto.GURL(url)) # window 0, tab 0
1842 return '__pc_done=1' in cookies
1843 self.assertTrue(
1844 self.WaitUntil(
1845 IsDone,
1846 timeout=(self.MAX_ITERATION_SECONDS * iterations),
1847 retry_sleep=1),
1848 msg='Timed out waiting for page cycler test to complete.')
1849
1850 def CollectPagesAndTimes(self, url):
1851 """Collect the results from the cookies."""
1852 pages, times = None, None
1853 cookies = self.GetCookie(pyauto.GURL(url)) # window 0, tab 0
1854 for cookie in cookies.split(';'):
1855 if '__pc_pages' in cookie:
1856 pages_str = cookie.split('=', 1)[1]
1857 pages = pages_str.split(',')
1858 elif '__pc_timings' in cookie:
1859 times_str = cookie.split('=', 1)[1]
1860 times = [float(t) for t in times_str.split(',')]
1861 self.assertTrue(pages and times,
1862 msg='Unable to find test results in cookies: %s' % cookies)
1863 return pages, times
1864
1865 def IteratePageTimes(self, pages, times, iterations):
1866 """Regroup the times by the page.
1867
1868 Args:
1869 pages: the list of pages
1870 times: e.g. [page1_iter1, page1_iter2, ..., page2_iter1, page2_iter2, ...]
1871 iterations: the number of times for each page
1872 Yields:
1873 (pageN, [pageN_iter1, pageN_iter2, ...])
1874 """
1875 num_pages = len(pages)
1876 num_times = len(times)
1877 expected_num_times = num_pages * iterations
1878 self.assertEqual(
1879 expected_num_times, num_times,
1880 msg=('num_times != num_pages * iterations: %s != %s * %s, times=%s' %
1881 (num_times, num_pages, iterations, times)))
1882 next_time = iter(times).next
1883 for page in pages:
1884 yield page, [next_time() for _ in range(iterations)]
1885
1886 def CheckPageTimes(self, pages, times, iterations):
1887 """Assert that all the times are greater than zero."""
1888 failed_pages = []
1889 for page, times in self.IteratePageTimes(pages, times, iterations):
1890 failed_times = [t for t in times if t <= 0.0]
1891 if failed_times:
1892 failed_pages.append((page, failed_times))
1893 if failed_pages:
1894 self.fail('Pages with unexpected times: %s' % failed_pages)
1895
1896 def TrimTimes(self, times, percent):
1897 """Return a new list with |percent| number of times trimmed for each page.
1898
1899 Removes the largest and smallest values.
1900 """
1901 iterations = len(times)
1902 times = sorted(times)
1903 num_to_trim = int(iterations * float(percent) / 100.0)
1904 logging.debug('Before trimming %d: %s' % (num_to_trim, times))
1905 a = num_to_trim / 2
1906 b = iterations - (num_to_trim / 2 + num_to_trim % 2)
1907 trimmed_times = times[a:b]
1908 logging.debug('After trimming: %s', trimmed_times)
1909 return trimmed_times
1910
1911 def ComputeFinalResult(self, pages, times, iterations):
1912 """The final score that is calculated is a geometric mean of the
1913 arithmetic means of each page's load time, and we drop the
1914 upper/lower 20% of the times for each page so they don't skew the
1915 mean. The geometric mean is used for the final score because the
1916 time range for any given site may be very different, and we don't
1917 want slower sites to weight more heavily than others.
1918 """
1919 self.CheckPageTimes(pages, times, iterations)
1920 page_means = [
1921 Mean(self.TrimTimes(times, percent=self.TRIM_PERCENT))
1922 for _, times in self.IteratePageTimes(pages, times, iterations)]
1923 return GeometricMean(page_means)
1924
1925 def StartUrl(self, test_name, iterations):
1926 """Return the URL to used to start the test.
1927
1928 Derived classes must implement this.
1929 """
1930 raise NotImplemented
1931
1932 def RunPageCyclerTest(self, name, description):
1933 """Runs the specified PageCycler test.
1934
1935 Args:
1936 name: the page cycler test name (corresponds to a directory or test file)
1937 description: a string description for the test
1938 """
1939 iterations = self._num_iterations
1940 start_url = self.StartUrl(name, iterations)
1941 self.NavigateToURL(start_url)
1942 self.WaitUntilDone(start_url, iterations)
1943 pages, times = self.CollectPagesAndTimes(start_url)
1944 final_result = self.ComputeFinalResult(pages, times, iterations)
1945 logging.info('%s page cycler final result: %f' %
1946 (description, final_result))
1947 self._OutputPerfGraphValue(description + '_PageCycler', final_result,
1948 'milliseconds', graph_name='PageCycler')
1949
1950
class PageCyclerTest(BasePageCyclerTest):
  """Tests to run various page cyclers.

  Setting 'PC_NO_AUTO=1' in the environment avoids automatically running
  through all the pages.
  """
1957
1958 def _PreReadDataDir(self, subdir):
1804 """This recursively reads all of the files in a given url directory. 1959 """This recursively reads all of the files in a given url directory.
1805 1960
1806 The intent is to get them into memory before they are used by the benchmark. 1961 The intent is to get them into memory before they are used by the benchmark.
1962
1963 Args:
1964 subdir: a subdirectory of the page cycler data directory.
1807 """ 1965 """
1808 def _PreReadDir(dirname, names): 1966 def _PreReadDir(dirname, names):
1809 for rfile in names: 1967 for rfile in names:
1810 with open(os.path.join(dirname, rfile)) as fp: 1968 with open(os.path.join(dirname, rfile)) as fp:
1811 fp.read() 1969 fp.read()
1812 1970 for root, dirs, files in os.walk(self.DataPath(subdir)):
1813 for root, dirs, files in os.walk(os.path.dirname(dir)):
1814 _PreReadDir(root, files) 1971 _PreReadDir(root, files)
1815 1972
1973 def StartUrl(self, test_name, iterations):
1974 start_url = self.GetFileURLForDataPath(
1975 self.DataPath(test_name), 'start.html?iterations=&d' % iterations)
1976 if self.use_auto:
1977 start_url += '&auto=1'
1978 return start_url
1979
1980 def RunPageCyclerTest(self, dirname, description):
1981 """Runs the specified PageCycler test.
1982
1983 Args:
1984 dirname: directory containing the page cycler test
1985 description: a string description for the test
1986 """
1987 self._PreReadDataDir('common')
1988 self._PreReadDataDir(dirname)
1989 super(PageCyclerTest, self).RunPageCyclerTest(dirname, description)
1990
1991 def testMoreJSFile(self):
1992 self.RunPageCyclerTest('morejs', 'MoreJSFile')
1993
1994 def testAlexaFile(self):
1995 self.RunPageCyclerTest('alexa_us', 'Alexa_usFile')
1996
1997 def testBloatFile(self):
1998 self.RunPageCyclerTest('bloat', 'BloatFile')
1999
2000 def testDHTMLFile(self):
2001 self.RunPageCyclerTest('dhtml', 'DhtmlFile')
2002
2003 def testIntl1File(self):
2004 self.RunPageCyclerTest('intl1', 'Intl1File')
2005
2006 def testIntl2File(self):
2007 self.RunPageCyclerTest('intl2', 'Intl2File')
2008
2009 def testMozFile(self):
2010 self.RunPageCyclerTest('moz', 'MozFile')
2011
2012 def testMoz2File(self):
2013 self.RunPageCyclerTest('moz2', 'Moz2File')
2014
2015
class WebPageReplayPageCyclerTest(BasePageCyclerTest):
  """Tests to run Web Page Replay backed page cycler tests.

  Web Page Replay is a proxy that can record and "replay" web pages with
  simulated network characteristics -- without having to edit the pages
  by hand. With WPR, tests can use "real" web content, and catch
  performance issues that may result from introducing network delays and
  bandwidth throttling.

  Setting 'PC_NO_AUTO=1' in the environment avoids automatically running
  through all the pages.
  Setting 'PC_RECORD=1' puts WPR in record mode.
  """
  # Chromium-root-relative paths for WPR resources; '{test_name}' slots
  # are filled in by _Path().
  _PATHS = {
      'archives': 'src/data/page_cycler/webpagereplay',
      'wpr': 'src/data/page_cycler/webpagereplay/{test_name}.wpr',
      'wpr_pub': 'src/tools/page_cycler/webpagereplay/tests/{test_name}.wpr',
      'start_page': 'src/tools/page_cycler/webpagereplay/start.html',
      'extension': 'src/tools/page_cycler/webpagereplay/extension',
      'replay': 'src/third_party/webpagereplay',
      'logs': 'src/webpagereplay_logs',
      }

  # Directory above 'src' (four levels up from this file).
  _BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                           '..', '..', '..', '..'))
  _IS_DNS_FORWARDED = False
  # Allow up to 3 minutes per WPR iteration (overrides the base class).
  MAX_ITERATION_SECONDS = 180
2043
1816 def setUp(self): 2044 def setUp(self):
1817 self._PreReadDir(os.path.join(self.PC_PATH, 'common')) 2045 """Performs necessary setup work before running each test."""
1818 BasePerfTest.setUp(self) 2046 super(WebPageReplayPageCyclerTest, self).setUp()
1819 2047 self.replay_dir = os.environ.get('PC_REPLAY_DIR')
1820 def _RunPageCyclerTest(self, dirname, iterations, description): 2048 self.is_record_mode = 'PC_RECORD' in os.environ
2049 if self.is_record_mode:
2050 self._num_iterations = 1
2051
2052 @classmethod
2053 def _Path(cls, key, **kwargs):
2054 """Provide paths for page cycler tests with Web Page Replay."""
2055 chromium_path = cls._PATHS[key].format(**kwargs)
2056 return os.path.join(cls._BASE_DIR, *chromium_path.split('/'))
2057
2058 @classmethod
2059 def _ArchivePath(cls, test_name):
2060 has_private_archives = os.path.exists(cls._Path('archives'))
2061 key = 'wpr' if has_private_archives else 'wpr_pub'
2062 return cls._Path(key, test_name=test_name)
2063
2064 def ExtraChromeFlags(self):
2065 """Ensures Chrome is launched with custom flags.
2066
2067 Returns:
2068 A list of extra flags to pass to Chrome when it is launched.
2069 """
2070 flags = super(WebPageReplayPageCyclerTest, self).ExtraChromeFlags()
2071 flags.append('--load-extension=%s' % self._Path('extension'))
2072 if not self._IS_DNS_FORWARDED:
2073 flags.append('--host-resolver-rules=MAP * %s' % webpagereplay.REPLAY_HOST)
2074 flags.extend([
2075 '--testing-fixed-http-port=%s' % webpagereplay.HTTP_PORT,
2076 '--testing-fixed-https-port=%s' % webpagereplay.HTTPS_PORT,
2077 '--log-level=0',
2078 ])
2079 extra_flags = [
2080 '--disable-background-networking',
2081 '--enable-experimental-extension-apis',
2082 '--enable-logging',
2083 '--enable-stats-table',
2084 '--enable-benchmarking',
2085 '--ignore-certificate-errors',
2086 '--metrics-recording-only',
2087 '--activate-on-launch',
2088 '--no-first-run',
2089 '--no-proxy-server',
2090 ]
2091 flags.extend(f for f in extra_flags if f not in flags)
2092 return flags
2093
2094 def StartUrl(self, test_name, iterations):
2095 start_url = 'file://%s?test=%s&iterations=%d' % (
2096 self._Path('start_page'), test_name, iterations)
2097 if self.use_auto:
2098 start_url += '&auto=1'
2099 return start_url
2100
2101 def RunPageCyclerTest(self, test_name, description):
1821 """Runs the specified PageCycler test. 2102 """Runs the specified PageCycler test.
1822 2103
1823 The final score that is calculated is a geometric mean of the 2104 Args:
1824 arithmetic means of each site's load time, and we drop the upper 2105 test_name: name for archive (.wpr) and config (.js) files.
1825 20% of the times for each site so they don't skew the mean. 2106 description: a string description for the test
1826 The Geometric mean is used for the final score because the time 2107 """
1827 range for any given site may be very different, and we don't want 2108 replay_options = []
1828 slower sites to weight more heavily than others. 2109 if not self._IS_DNS_FORWARDED:
1829 2110 replay_options.append('--no-dns_forwarding')
1830 Args: 2111 if self.is_record_mode:
1831 dirname: The directory containing the page cycler test. 2112 replay_options.append('--record')
1832 iterations: How many times to run through the set of pages. 2113 if self.replay_dir:
1833 description: A string description for the particular test being run. 2114 replay_dir = self.replay_dir
1834 """ 2115 else:
1835 self._PreReadDir(os.path.join(self.PC_PATH, dirname)) 2116 self._Path('replay'),
1836 2117 with webpagereplay.ReplayServer(
1837 url = self.GetFileURLForDataPath(os.path.join(self.PC_PATH, dirname), 2118 replay_dir,
1838 'start.html') 2119 self._ArchivePath(test_name),
1839 2120 self._Path('logs'),
1840 self.NavigateToURL('%s?auto=1&iterations=%d' % (url, iterations)) 2121 replay_options):
1841 2122 super_self = super(WebPageReplayPageCyclerTest, self)
1842 # Check cookies for "__pc_done=1" to know the test is over. 2123 super_self.RunPageCyclerTest(test_name, description)
1843 def IsTestDone(): 2124
1844 cookies = self.GetCookie(pyauto.GURL(url)) # Window 0, tab 0. 2125 def test2012Q2(self):
1845 return '__pc_done=1' in cookies 2126 self.RunPageCyclerTest('2012Q2', '2012Q2')
1846
1847 self.assertTrue(
1848 self.WaitUntil(IsTestDone, timeout=(60 * iterations), retry_sleep=1),
1849 msg='Timed out waiting for page cycler test to complete.')
1850
1851 # Collect the results from the cookies.
1852 site_to_time_list = {}
1853 cookies = self.GetCookie(pyauto.GURL(url)) # Window 0, tab 0.
1854 site_list = ''
1855 time_list = ''
1856 for cookie in cookies.split(';'):
1857 if '__pc_pages' in cookie:
1858 site_list = cookie[cookie.find('=') + 1:]
1859 elif '__pc_timings' in cookie:
1860 time_list = cookie[cookie.find('=') + 1:]
1861 self.assertTrue(site_list and time_list,
1862 msg='Could not find test results in cookies: %s' % cookies)
1863 site_list = site_list.split(',')
1864 time_list = time_list.split(',')
1865 self.assertEqual(iterations, len(time_list) / len(site_list),
1866 msg='Iteration count %d does not match with site/timing '
1867 'lists: %s and %s' % (iterations, site_list, time_list))
1868 for site_index, site in enumerate(site_list):
1869 site_to_time_list[site] = []
1870 for iteration_index in xrange(iterations):
1871 site_to_time_list[site].append(
1872 float(time_list[iteration_index * len(site_list) + site_index]))
1873
1874 site_times = []
1875 for site, time_list in site_to_time_list.iteritems():
1876 sorted_times = sorted(time_list)
1877 num_to_drop = int(len(sorted_times) * 0.2)
1878 logging.debug('Before dropping %d: ' % num_to_drop)
1879 logging.debug(sorted_times)
1880 if num_to_drop:
1881 sorted_times = sorted_times[:-num_to_drop]
1882 logging.debug('After dropping:')
1883 logging.debug(sorted_times)
1884 # Do an arithmetic mean of the load times for a given page.
1885 mean_time = sum(sorted_times) / len(sorted_times)
1886 logging.debug('Mean time is: ' + str(mean_time))
1887 site_times.append(mean_time)
1888
1889 logging.info('site times = %s' % site_times)
1890 # Compute a geometric mean over the averages for each site.
1891 final_result = reduce(lambda x, y: x * y,
1892 site_times) ** (1.0/ len(site_times))
1893 logging.info('%s page cycler final result: %f' %
1894 (description, final_result))
1895 self._OutputPerfGraphValue(description + '_PageCycler', final_result,
1896 'milliseconds', graph_name='PageCycler')
1897
1898 def testMoreJSFile(self):
1899 self._RunPageCyclerTest('morejs', self._num_iterations, 'MoreJSFile')
1900
1901 def testAlexaFile(self):
1902 self._RunPageCyclerTest('alexa_us', self._num_iterations, 'Alexa_usFile')
1903
1904 def testBloatFile(self):
1905 self._RunPageCyclerTest('bloat', self._num_iterations, 'BloatFile')
1906
1907 def testDHTMLFile(self):
1908 self._RunPageCyclerTest('dhtml', self._num_iterations, 'DhtmlFile')
1909
1910 def testIntl1File(self):
1911 self._RunPageCyclerTest('intl1', self._num_iterations, 'Intl1File')
1912
1913 def testIntl2File(self):
1914 self._RunPageCyclerTest('intl2', self._num_iterations, 'Intl2File')
1915
1916 def testMozFile(self):
1917 self._RunPageCyclerTest('moz', self._num_iterations, 'MozFile')
1918
1919 def testMoz2File(self):
1920 self._RunPageCyclerTest('moz2', self._num_iterations, 'Moz2File')
1921 2127
1922 2128
1923 class MemoryTest(BasePerfTest): 2129 class MemoryTest(BasePerfTest):
1924 """Tests to measure memory consumption under different usage scenarios.""" 2130 """Tests to measure memory consumption under different usage scenarios."""
1925 2131
1926 def ExtraChromeFlags(self): 2132 def ExtraChromeFlags(self):
1927 """Launches Chrome with custom flags. 2133 """Launches Chrome with custom flags.
1928 2134
1929 Returns: 2135 Returns:
1930 A list of extra flags to pass to Chrome when it is launched. 2136 A list of extra flags to pass to Chrome when it is launched.
(...skipping 322 matching lines...) Expand 10 before | Expand all | Expand 10 after
2253 """Identifies the port number to which the server is currently bound. 2459 """Identifies the port number to which the server is currently bound.
2254 2460
2255 Returns: 2461 Returns:
2256 The numeric port number to which the server is currently bound. 2462 The numeric port number to which the server is currently bound.
2257 """ 2463 """
2258 return self._server.server_address[1] 2464 return self._server.server_address[1]
2259 2465
2260 2466
2261 if __name__ == '__main__': 2467 if __name__ == '__main__':
2262 pyauto_functional.Main() 2468 pyauto_functional.Main()
OLDNEW
« no previous file with comments | « chrome/test/functional/PYAUTO_TESTS ('k') | chrome/test/functional/webpagereplay.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698