Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Basic pyauto performance tests. | 6 """Basic pyauto performance tests. |
| 7 | 7 |
| 8 For tests that need to be run for multiple iterations (e.g., so that average | 8 For tests that need to be run for multiple iterations (e.g., so that average |
| 9 and standard deviation values can be reported), the default number of iterations | 9 and standard deviation values can be reported), the default number of iterations |
| 10 run for each of these tests is specified by |_DEFAULT_NUM_ITERATIONS|. | 10 run for each of these tests is specified by |_DEFAULT_NUM_ITERATIONS|. |
| (...skipping 30 matching lines...) Expand all Loading... | |
| 41 import urllib2 | 41 import urllib2 |
| 42 import urlparse | 42 import urlparse |
| 43 | 43 |
| 44 import pyauto_functional # Must be imported before pyauto. | 44 import pyauto_functional # Must be imported before pyauto. |
| 45 import pyauto | 45 import pyauto |
| 46 import simplejson # Must be imported after pyauto; located in third_party. | 46 import simplejson # Must be imported after pyauto; located in third_party. |
| 47 | 47 |
| 48 from netflix import NetflixTestHelper | 48 from netflix import NetflixTestHelper |
| 49 import pyauto_utils | 49 import pyauto_utils |
| 50 import test_utils | 50 import test_utils |
| 51 import webpagereplay | |
| 51 from youtube import YoutubeTestHelper | 52 from youtube import YoutubeTestHelper |
| 52 | 53 |
| 53 | 54 |
| 54 class BasePerfTest(pyauto.PyUITest): | 55 class BasePerfTest(pyauto.PyUITest): |
| 55 """Base class for performance tests.""" | 56 """Base class for performance tests.""" |
| 56 | 57 |
| 57 _DEFAULT_NUM_ITERATIONS = 10 # Keep synced with desktopui_PyAutoPerfTests.py. | 58 _DEFAULT_NUM_ITERATIONS = 10 # Keep synced with desktopui_PyAutoPerfTests.py. |
| 58 _DEFAULT_MAX_TIMEOUT_COUNT = 10 | 59 _DEFAULT_MAX_TIMEOUT_COUNT = 10 |
| 59 _PERF_OUTPUT_MARKER_PRE = '_PERF_PRE_' | 60 _PERF_OUTPUT_MARKER_PRE = '_PERF_PRE_' |
| 60 _PERF_OUTPUT_MARKER_POST = '_PERF_POST_' | 61 _PERF_OUTPUT_MARKER_POST = '_PERF_POST_' |
| (...skipping 1665 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1726 logging.info('Total v8 heap size: %f MB', v8_heap_size) | 1727 logging.info('Total v8 heap size: %f MB', v8_heap_size) |
| 1727 self._OutputPerfGraphValue(description + 'V8HeapSize', v8_heap_size, 'MB', | 1728 self._OutputPerfGraphValue(description + 'V8HeapSize', v8_heap_size, 'MB', |
| 1728 graph_name + '_v8_heap_size') | 1729 graph_name + '_v8_heap_size') |
| 1729 | 1730 |
| 1730 def testAngryBirds(self): | 1731 def testAngryBirds(self): |
| 1731 """Measures performance for Angry Birds.""" | 1732 """Measures performance for Angry Birds.""" |
| 1732 self._RunLiveGamePerfTest('http://chrome.angrybirds.com', 'Angry Birds', | 1733 self._RunLiveGamePerfTest('http://chrome.angrybirds.com', 'Angry Birds', |
| 1733 'AngryBirds', 'angry_birds') | 1734 'AngryBirds', 'angry_birds') |
| 1734 | 1735 |
| 1735 | 1736 |
| 1736 class PageCyclerTest(BasePerfTest): | 1737 class BasePageCyclerTest(BasePerfTest): |
| 1737 """Tests to run various page cyclers.""" | 1738 """Page class for page cycler tests. |
| 1739 | |
| 1740 Derived classes must implement _StartUrl(). | |
|
Nirnimesh
2012/05/18 19:19:13
Methods prefixed with _ are considered private. De
slamm_google
2012/05/18 23:59:28
Done. Thanks I was unclear on that. I changed _Run
| |
| 1741 """ | |
| 1738 | 1742 |
| 1739 # Page Cycler lives in src/data/page_cycler rather than src/chrome/test/data | 1743 # Page Cycler lives in src/data/page_cycler rather than src/chrome/test/data |
| 1740 PC_PATH = os.path.join(BasePerfTest.DataDir(), os.pardir, os.pardir, | 1744 DATA_PATH = os.path.join(BasePerfTest.DataDir(), os.pardir, os.pardir, |
| 1741 os.pardir, 'data', 'page_cycler') | 1745 os.pardir, 'data', 'page_cycler') |
| 1746 @classmethod | |
|
tonyg
2012/05/18 15:37:54
nit: blank line above this
slamm_google
2012/05/18 23:59:28
Done.
| |
| 1747 def DataPath(cls, subdir): | |
| 1748 return os.path.join(cls.DATA_PATH, subdir) | |
| 1742 | 1749 |
| 1743 def ExtraChromeFlags(self): | 1750 def ExtraChromeFlags(self): |
| 1744 """Ensures Chrome is launched with custom flags. | 1751 """Ensures Chrome is launched with custom flags. |
| 1745 | 1752 |
| 1746 Returns: | 1753 Returns: |
| 1747 A list of extra flags to pass to Chrome when it is launched. | 1754 A list of extra flags to pass to Chrome when it is launched. |
| 1748 """ | 1755 """ |
| 1749 # Extra flags required to run these tests. | 1756 # Extra flags required to run these tests. |
| 1750 # The first two are needed for the test. | 1757 # The first two are needed for the test. |
| 1751 # The plugins argument is to prevent bad scores due to pop-ups from | 1758 # The plugins argument is to prevent bad scores due to pop-ups from |
| 1752 # running an old version of something (like Flash). | 1759 # running an old version of something (like Flash). |
| 1753 return (super(PageCyclerTest, self).ExtraChromeFlags() + | 1760 return (super(BasePageCyclerTest, self).ExtraChromeFlags() + |
| 1754 ['--js-flags="--expose_gc"', | 1761 ['--js-flags="--expose_gc"', |
| 1755 '--enable-file-cookies', | 1762 '--enable-file-cookies', |
| 1756 '--allow-outdated-plugins']) | 1763 '--allow-outdated-plugins']) |
| 1757 | 1764 |
| 1758 def _PreReadDir(self, dir): | 1765 def _WaitUntilDone(self, url, iterations): |
| 1766 """Check cookies for "__pc_done=1" to know the test is over.""" | |
| 1767 def IsDone(): | |
| 1768 cookies = self.GetCookie(pyauto.GURL(url)) # window 0, tab 0 | |
| 1769 return '__pc_done=1' in cookies | |
| 1770 self.assertTrue( | |
| 1771 self.WaitUntil(IsDone, timeout=(60 * iterations), retry_sleep=1), | |
| 1772 msg='Timed out waiting for page cycler test to complete.') | |
| 1773 | |
| 1774 def _CollectPagesAndTimes(self, url): | |
| 1775 """Collect the results from the cookies.""" | |
| 1776 pages, times = None, None | |
| 1777 cookies = self.GetCookie(pyauto.GURL(url)) # window 0, tab 0 | |
| 1778 for cookie in cookies.split(';'): | |
| 1779 if '__pc_pages' in cookie: | |
| 1780 pages_str = cookie.split('=', 1)[1] | |
| 1781 pages = pages_str.split(',') | |
| 1782 elif '__pc_timings' in cookie: | |
| 1783 times_str = cookie.split('=', 1)[1] | |
| 1784 times = [float(t) for t in times_str.split(',')] | |
| 1785 self.assertTrue(pages and times, | |
| 1786 msg='Unable to find test results in cookies: %s' % cookies) | |
| 1787 return pages, times | |
| 1787 | |
| 1788 def _IteratePageTimes(self, times, num_pages, iterations): | |
| 1789 """Regroup the times by the page. | |
| 1790 | |
| 1791 Args: | |
| 1792 times: e.g. [page1_iter1, page1_iter2, ..., page2_iter1, page2_iter2, ...] | |
| 1793 num_pages: the number of pages | |
| 1794 iterations: the number of times for each page | |
| 1795 Yields: | |
| 1796 times for one page: [page1_iter1, page1_iter2, ...] | |
| 1797 """ | |
| 1798 expected_num_times = num_pages * iterations | |
| 1799 self.assertEqual( | |
| 1800 expected_num_times, len(times), | |
| 1801 msg=('len(times) != num_pages * iterations: %s != %s * %s, times=%s' % | |
| 1802 (len(times), num_pages, iterations, times))) | |
| 1803 | |
| 1804 next_time = iter(times).next | |
| 1805 for _ in range(num_pages): | |
| 1806 yield [next_time() for _ in range(iterations)] | |
| 1807 | |
| 1808 def _TrimTimes(self, times, percent): | |
| 1809 """Return a new list with |percent| number of times trimmed for each page. | |
| 1810 | |
| 1811 Removes the largest and smallest values. | |
| 1812 """ | |
| 1813 iterations = len(times) | |
| 1814 times = sorted(times) | |
| 1815 logging.debug('Before trimming: %s', times) | |
| 1816 | |
| 1817 num_to_trim = int(iterations * float(percent) / 100.0) | |
| 1818 a = num_to_trim / 2 | |
| 1819 b = iterations - (num_to_trim / 2 + num_to_trim % 2) | |
| 1820 trimmed_times = times[a:b] | |
| 1821 logging.debug('After trimming: %s', trimmed_times) | |
| 1822 return trimmed_times | |
| 1823 | |
| 1824 def _GetArithmeticMean(self, values): | |
|
tonyg
2012/05/18 15:37:54
These math helper functions shouldn't be named as
slamm_google
2012/05/18 23:59:28
I do not see any collection of functions in this d
| |
| 1825 """Return the arithmetic mean of |values|.""" | |
| 1826 return sum(values) / len(values) | |
| 1827 | |
| 1828 def _GetGeometricMean(self, values): | |
| 1829 """Return the geometric mean of |values|.""" | |
| 1830 return reduce(lambda x, y: x * y, values) ** (1.0 / len(values)) | |
| 1831 | |
| 1832 def _ComputeFinalResult(self, times, num_pages, iterations): | |
| 1833 """The final score that is calculated is a geometric mean of the | |
| 1834 arithmetic means of each page's load time, and we drop the | |
| 1835 upper/lower 20% of the times for each page so they don't skew the | |
| 1836 mean. The geometric mean is used for the final score because the | |
| 1837 time range for any given site may be very different, and we don't | |
| 1838 want slower sites to weight more heavily than others. | |
| 1839 """ | |
| 1840 page_means = [ | |
| 1841 self._GetArithmeticMean(self._TrimTimes(times, percent=20)) | |
| 1842 for times in self._IteratePageTimes(times, num_pages, iterations)] | |
| 1843 return self._GetGeometricMean(page_means) | |
| 1844 | |
| 1845 def _StartUrl(self, test_name, iterations): | |
| 1846 """Return the URL to used to start the test. | |
| 1847 | |
| 1848 Subclasses must implement this. | |
| 1849 """ | |
| 1850 raise NotImplementedError | |
| 1851 | |
| 1852 def _RunPageCyclerTest(self, name, description): | |
| 1853 """Runs the specified PageCycler test. | |
| 1854 | |
|
tonyg
2012/05/18 15:37:54
nit: extra line break
slamm_google
2012/05/18 23:59:28
Done.
| |
| 1855 | |
| 1856 Args: | |
| 1857 name: the page cycler test name (corresponds to a directory or test file) | |
| 1858 description: a string description for the test | |
| 1859 """ | |
| 1860 iterations = self._num_iterations | |
| 1861 start_url = self._StartUrl(name, iterations) | |
| 1862 self.NavigateToURL(start_url) | |
| 1863 self._WaitUntilDone(start_url, iterations) | |
| 1864 pages, times = self._CollectPagesAndTimes(start_url) | |
| 1865 final_result = self._ComputeFinalResult(times, len(pages), iterations) | |
| 1866 logging.info('%s page cycler final result: %f' % | |
| 1867 (description, final_result)) | |
| 1868 self._OutputPerfGraphValue(description + '_PageCycler', final_result, | |
| 1869 'milliseconds', graph_name='PageCycler') | |
| 1870 | |
| 1871 | |
| 1872 class PageCyclerTest(BasePageCyclerTest): | |
| 1873 """Tests to run various page cyclers.""" | |
| 1874 | |
| 1875 def _PreReadDataDir(self, subdir): | |
| 1759 """This recursively reads all of the files in a given url directory. | 1876 """This recursively reads all of the files in a given url directory. |
| 1760 | 1877 |
| 1761 The intent is to get them into memory before they are used by the benchmark. | 1878 The intent is to get them into memory before they are used by the benchmark. |
| 1879 | |
| 1880 Args: | |
| 1881 subdir: a subdirectory of the page cycler data directory. | |
| 1762 """ | 1882 """ |
| 1763 def _PreReadDir(dirname, names): | 1883 def _PreReadDir(dirname, names): |
| 1764 for rfile in names: | 1884 for rfile in names: |
| 1765 with open(os.path.join(dirname, rfile)) as fp: | 1885 with open(os.path.join(dirname, rfile)) as fp: |
| 1766 fp.read() | 1886 fp.read() |
| 1767 | 1887 for root, dirs, files in os.walk(self.DataPath(subdir)): |
| 1768 for root, dirs, files in os.walk(os.path.dirname(dir)): | |
| 1769 _PreReadDir(root, files) | 1888 _PreReadDir(root, files) |
| 1770 | 1889 |
| 1771 def setUp(self): | 1890 def _StartUrl(self, test_name, iterations): |
| 1772 self._PreReadDir(os.path.join(self.PC_PATH, 'common')) | 1891 return self.GetFileURLForDataPath( |
| 1773 BasePerfTest.setUp(self) | 1892 self.DataPath(test_name), |
| 1774 | 1893 'start.html?auto=1&iterations=%d' % iterations) |
| 1775 def _RunPageCyclerTest(self, dirname, iterations, description): | 1894 |
| 1895 def _RunPageCyclerTest(self, dirname, description): | |
| 1776 """Runs the specified PageCycler test. | 1896 """Runs the specified PageCycler test. |
| 1777 | 1897 |
| 1778 The final score that is calculated is a geometric mean of the | 1898 Args: |
| 1779 arithmetic means of each site's load time, and we drop the upper | 1899 dirname: directory containing the page cycler test |
| 1780 20% of the times for each site so they don't skew the mean. | 1900 description: a string description for the test |
| 1781 The Geometric mean is used for the final score because the time | 1901 """ |
| 1782 range for any given site may be very different, and we don't want | 1902 self._PreReadDataDir('common') |
| 1783 slower sites to weight more heavily than others. | 1903 self._PreReadDataDir(dirname) |
| 1784 | 1904 super(PageCyclerTest, self)._RunPageCyclerTest(dirname, description) |
| 1785 Args: | |
| 1786 dirname: The directory containing the page cycler test. | |
| 1787 iterations: How many times to run through the set of pages. | |
| 1788 description: A string description for the particular test being run. | |
| 1789 """ | |
| 1790 self._PreReadDir(os.path.join(self.PC_PATH, dirname)) | |
| 1791 | |
| 1792 url = self.GetFileURLForDataPath(os.path.join(self.PC_PATH, dirname), | |
| 1793 'start.html') | |
| 1794 | |
| 1795 self.NavigateToURL('%s?auto=1&iterations=%d' % (url, iterations)) | |
| 1796 | |
| 1797 # Check cookies for "__pc_done=1" to know the test is over. | |
| 1798 def IsTestDone(): | |
| 1799 cookies = self.GetCookie(pyauto.GURL(url)) # Window 0, tab 0. | |
| 1800 return '__pc_done=1' in cookies | |
| 1801 | |
| 1802 self.assertTrue( | |
| 1803 self.WaitUntil(IsTestDone, timeout=(60 * iterations), retry_sleep=1), | |
| 1804 msg='Timed out waiting for page cycler test to complete.') | |
| 1805 | |
| 1806 # Collect the results from the cookies. | |
| 1807 site_to_time_list = {} | |
| 1808 cookies = self.GetCookie(pyauto.GURL(url)) # Window 0, tab 0. | |
| 1809 site_list = '' | |
| 1810 time_list = '' | |
| 1811 for cookie in cookies.split(';'): | |
| 1812 if '__pc_pages' in cookie: | |
| 1813 site_list = cookie[cookie.find('=') + 1:] | |
| 1814 elif '__pc_timings' in cookie: | |
| 1815 time_list = cookie[cookie.find('=') + 1:] | |
| 1816 self.assertTrue(site_list and time_list, | |
| 1817 msg='Could not find test results in cookies: %s' % cookies) | |
| 1818 site_list = site_list.split(',') | |
| 1819 time_list = time_list.split(',') | |
| 1820 self.assertEqual(iterations, len(time_list) / len(site_list), | |
| 1821 msg='Iteration count %d does not match with site/timing ' | |
| 1822 'lists: %s and %s' % (iterations, site_list, time_list)) | |
| 1823 for site_index, site in enumerate(site_list): | |
| 1824 site_to_time_list[site] = [] | |
| 1825 for iteration_index in xrange(iterations): | |
| 1826 site_to_time_list[site].append( | |
| 1827 float(time_list[iteration_index * len(site_list) + site_index])) | |
| 1828 | |
| 1829 site_times = [] | |
| 1830 for site, time_list in site_to_time_list.iteritems(): | |
| 1831 sorted_times = sorted(time_list) | |
| 1832 num_to_drop = int(len(sorted_times) * 0.2) | |
| 1833 logging.debug('Before dropping %d: ' % num_to_drop) | |
| 1834 logging.debug(sorted_times) | |
| 1835 if num_to_drop: | |
| 1836 sorted_times = sorted_times[:-num_to_drop] | |
| 1837 logging.debug('After dropping:') | |
| 1838 logging.debug(sorted_times) | |
| 1839 # Do an arithmetic mean of the load times for a given page. | |
| 1840 mean_time = sum(sorted_times) / len(sorted_times) | |
| 1841 logging.debug('Mean time is: ' + str(mean_time)) | |
| 1842 site_times.append(mean_time) | |
| 1843 | |
| 1844 logging.info('site times = %s' % site_times) | |
| 1845 # Compute a geometric mean over the averages for each site. | |
| 1846 final_result = reduce(lambda x, y: x * y, | |
| 1847 site_times) ** (1.0/ len(site_times)) | |
| 1848 logging.info('%s page cycler final result: %f' % | |
| 1849 (description, final_result)) | |
| 1850 self._OutputPerfGraphValue(description + '_PageCycler', final_result, | |
| 1851 'milliseconds', graph_name='PageCycler') | |
| 1852 | 1905 |
| 1853 def testMoreJSFile(self): | 1906 def testMoreJSFile(self): |
| 1854 self._RunPageCyclerTest('morejs', self._num_iterations, 'MoreJSFile') | 1907 self._RunPageCyclerTest('morejs', 'MoreJSFile') |
| 1855 | 1908 |
| 1856 def testAlexaFile(self): | 1909 def testAlexaFile(self): |
| 1857 self._RunPageCyclerTest('alexa_us', self._num_iterations, 'Alexa_usFile') | 1910 self._RunPageCyclerTest('alexa_us', 'Alexa_usFile') |
| 1858 | 1911 |
| 1859 def testBloatFile(self): | 1912 def testBloatFile(self): |
| 1860 self._RunPageCyclerTest('bloat', self._num_iterations, 'BloatFile') | 1913 self._RunPageCyclerTest('bloat', 'BloatFile') |
| 1861 | 1914 |
| 1862 def testDHTMLFile(self): | 1915 def testDHTMLFile(self): |
| 1863 self._RunPageCyclerTest('dhtml', self._num_iterations, 'DhtmlFile') | 1916 self._RunPageCyclerTest('dhtml', 'DhtmlFile') |
| 1864 | 1917 |
| 1865 def testIntl1File(self): | 1918 def testIntl1File(self): |
| 1866 self._RunPageCyclerTest('intl1', self._num_iterations, 'Intl1File') | 1919 self._RunPageCyclerTest('intl1', 'Intl1File') |
| 1867 | 1920 |
| 1868 def testIntl2File(self): | 1921 def testIntl2File(self): |
| 1869 self._RunPageCyclerTest('intl2', self._num_iterations, 'Intl2File') | 1922 self._RunPageCyclerTest('intl2', 'Intl2File') |
| 1870 | 1923 |
| 1871 def testMozFile(self): | 1924 def testMozFile(self): |
| 1872 self._RunPageCyclerTest('moz', self._num_iterations, 'MozFile') | 1925 self._RunPageCyclerTest('moz', 'MozFile') |
| 1873 | 1926 |
| 1874 def testMoz2File(self): | 1927 def testMoz2File(self): |
| 1875 self._RunPageCyclerTest('moz2', self._num_iterations, 'Moz2File') | 1928 self._RunPageCyclerTest('moz2', 'Moz2File') |
| 1929 | |
| 1930 | |
| 1931 class WebPageReplayPageCyclerTest(BasePageCyclerTest): | |
|
tonyg
2012/05/18 15:37:54
Please add a class level docstring.
slamm_google
2012/05/18 23:59:28
Done.
| |
| 1932 _TEST_EXE_NAME = 'perf_py' | |
| 1933 _IS_DNS_FORWARDED = False | |
| 1934 | |
| 1935 def ChromiumPaths(self, test_name=None): | |
| 1936 return webpagereplay.ChromiumPaths( | |
| 1937 TEST_EXE_NAME=self._TEST_EXE_NAME, TEST_NAME=test_name) | |
| 1938 | |
| 1939 def ExtraChromeFlags(self): | |
| 1940 """Ensures Chrome is launched with custom flags. | |
| 1941 | |
| 1942 Returns: | |
| 1943 A list of extra flags to pass to Chrome when it is launched. | |
| 1944 """ | |
| 1945 flags = super(WebPageReplayPageCyclerTest, self).ExtraChromeFlags() | |
| 1946 chromium_paths = self.ChromiumPaths() | |
| 1947 webpagereplay.ChromeFlags( | |
| 1948 flags, | |
| 1949 extension_path=chromium_paths['extension'], | |
| 1950 is_dns_forwarded=self._IS_DNS_FORWARDED) | |
| 1951 return flags | |
| 1952 | |
| 1953 def _StartUrl(self, test_name, iterations): | |
| 1954 chromium_paths = self.ChromiumPaths(test_name=test_name) | |
| 1955 return chromium_paths.GetStartUrl(iterations, use_auto=True) | |
| 1956 | |
| 1957 def _RunPageCyclerTest(self, name, description): | |
| 1958 """Runs the specified PageCycler test. | |
| 1959 | |
| 1960 Args: | |
| 1961 name: name for archive and config files: |name|.wpr and |name|.js. | |
| 1962 description: a string description for the test | |
| 1963 """ | |
| 1964 chromium_paths = self.ChromiumPaths(test_name=name) | |
| 1965 replay_options = [] | |
| 1966 if not self._IS_DNS_FORWARDED: | |
| 1967 replay_options.append('--no-dns_forwarding') | |
| 1968 is_record_mode = False # TODO(slamm): get from environment variable? | |
| 1969 if is_record_mode: | |
| 1970 replay_options.append('--record') | |
| 1971 with webpagereplay.ReplayServer( | |
| 1972 chromium_paths['replay'], | |
| 1973 chromium_paths.GetArchivePath(), | |
| 1974 chromium_paths['logs'], | |
| 1975 replay_options): | |
| 1976 super_self = super(WebPageReplayPageCyclerTest, self) | |
| 1977 super_self._RunPageCyclerTest(name, description) | |
| 1978 | |
|
tonyg
2012/05/18 15:37:54
nit: extra line break
slamm_google
2012/05/18 23:59:28
Done.
| |
| 1979 | |
| 1980 def testWpr2012Q2(self): | |
| 1981 self._RunPageCyclerTest('2012Q2', 'Wpr2012Q2') | |
|
dennis_jeffrey
2012/05/18 00:28:41
Is this expected to be able to run on ChromeOS?
B
slamm_google
2012/05/18 23:59:29
Thanks for the heads-up.
Does this python code r
Sonny
2012/05/19 00:50:10
It runs directly on ChromeOS, and as Dennis mentio
| |
| 1876 | 1982 |
| 1877 | 1983 |
| 1878 class MemoryTest(BasePerfTest): | 1984 class MemoryTest(BasePerfTest): |
| 1879 """Tests to measure memory consumption under different usage scenarios.""" | 1985 """Tests to measure memory consumption under different usage scenarios.""" |
| 1880 | 1986 |
| 1881 def ExtraChromeFlags(self): | 1987 def ExtraChromeFlags(self): |
| 1882 """Launches Chrome with custom flags. | 1988 """Launches Chrome with custom flags. |
| 1883 | 1989 |
| 1884 Returns: | 1990 Returns: |
| 1885 A list of extra flags to pass to Chrome when it is launched. | 1991 A list of extra flags to pass to Chrome when it is launched. |
| (...skipping 322 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2208 """Identifies the port number to which the server is currently bound. | 2314 """Identifies the port number to which the server is currently bound. |
| 2209 | 2315 |
| 2210 Returns: | 2316 Returns: |
| 2211 The numeric port number to which the server is currently bound. | 2317 The numeric port number to which the server is currently bound. |
| 2212 """ | 2318 """ |
| 2213 return self._server.server_address[1] | 2319 return self._server.server_address[1] |
| 2214 | 2320 |
| 2215 | 2321 |
| 2216 if __name__ == '__main__': | 2322 if __name__ == '__main__': |
| 2217 pyauto_functional.Main() | 2323 pyauto_functional.Main() |
| OLD | NEW |