Chromium Code Reviews| Index: chrome/test/functional/perf.py |
| diff --git a/chrome/test/functional/perf.py b/chrome/test/functional/perf.py |
| index fd299e0ef73bfd9faba408db35ca1c8712315797..55a2b813bf0f206c9430beeb01497835b02b169a 100755 |
| --- a/chrome/test/functional/perf.py |
| +++ b/chrome/test/functional/perf.py |
| @@ -48,6 +48,7 @@ import simplejson # Must be imported after pyauto; located in third_party. |
| from netflix import NetflixTestHelper |
| import pyauto_utils |
| import test_utils |
| +import webpagereplay |
| from youtube import YoutubeTestHelper |
| @@ -1733,12 +1734,18 @@ class LiveGamePerfTest(BasePerfTest): |
| 'AngryBirds', 'angry_birds') |
| -class PageCyclerTest(BasePerfTest): |
| - """Tests to run various page cyclers.""" |
| +class BasePageCyclerTest(BasePerfTest): |
| + """Base class for page cycler tests. |
| + |
| + Derived classes must implement _StartUrl(). |
|
Nirnimesh
2012/05/18 19:19:13
Methods prefixed with _ are considered private. De
slamm_google
2012/05/18 23:59:28
Done. Thanks I was unclear on that. I changed _Run
|
| + """ |
| # Page Cycler lives in src/data/page_cycler rather than src/chrome/test/data |
| - PC_PATH = os.path.join(BasePerfTest.DataDir(), os.pardir, os.pardir, |
| - os.pardir, 'data', 'page_cycler') |
| + DATA_PATH = os.path.join(BasePerfTest.DataDir(), os.pardir, os.pardir, |
| + os.pardir, 'data', 'page_cycler') |
| + @classmethod |
|
tonyg
2012/05/18 15:37:54
nit: blank line above this
slamm_google
2012/05/18 23:59:28
Done.
|
| + def DataPath(cls, subdir): |
| + return os.path.join(cls.DATA_PATH, subdir) |
| def ExtraChromeFlags(self): |
| """Ensures Chrome is launched with custom flags. |
| @@ -1750,129 +1757,228 @@ class PageCyclerTest(BasePerfTest): |
| # The first two are needed for the test. |
| # The plugins argument is to prevent bad scores due to pop-ups from |
| # running an old version of something (like Flash). |
| - return (super(PageCyclerTest, self).ExtraChromeFlags() + |
| + return (super(BasePageCyclerTest, self).ExtraChromeFlags() + |
| ['--js-flags="--expose_gc"', |
| '--enable-file-cookies', |
| '--allow-outdated-plugins']) |
| - def _PreReadDir(self, dir): |
| - """This recursively reads all of the files in a given url directory. |
| + def _WaitUntilDone(self, url, iterations): |
| + """Check cookies for "__pc_done=1" to know the test is over.""" |
| + def IsDone(): |
| + cookies = self.GetCookie(pyauto.GURL(url)) # window 0, tab 0 |
| + return '__pc_done=1' in cookies |
| + self.assertTrue( |
| + self.WaitUntil(IsDone, timeout=(60 * iterations), retry_sleep=1), |
| + msg='Timed out waiting for page cycler test to complete.') |
| - The intent is to get them into memory before they are used by the benchmark. |
| - """ |
| - def _PreReadDir(dirname, names): |
| - for rfile in names: |
| - with open(os.path.join(dirname, rfile)) as fp: |
| - fp.read() |
def _CollectPagesAndTimes(self, url):
  """Collect the pages and load times recorded in the test's cookies.

  The page cycler page writes its results into the '__pc_pages' and
  '__pc_timings' cookies; parse both out of the cookie string.

  Args:
    url: the test URL whose cookies (window 0, tab 0) hold the results.

  Returns:
    A (pages, times) tuple, where pages is a list of page-name strings and
    times is a flat list of float load times (all iterations of page 1,
    then all iterations of page 2, ...).
  """
  pages, times = None, None
  cookies = self.GetCookie(pyauto.GURL(url))  # window 0, tab 0
  for cookie in cookies.split(';'):
    if '__pc_pages' in cookie:
      pages_str = cookie.split('=', 1)[1]
      pages = pages_str.split(',')
    elif '__pc_timings' in cookie:
      times_str = cookie.split('=', 1)[1]
      times = [float(t) for t in times_str.split(',')]
  self.assertTrue(pages and times,
                  msg='Unable to find test results in cookies: %s' % cookies)
  # Bug fix: the parsed results were previously dropped on the floor;
  # the caller unpacks this method's return value.
  return pages, times
| - for root, dirs, files in os.walk(os.path.dirname(dir)): |
| - _PreReadDir(root, files) |
| + def _IteratePageTimes(self, times, num_pages, iterations): |
| + """Regroup the times by the page. |
| - def setUp(self): |
| - self._PreReadDir(os.path.join(self.PC_PATH, 'common')) |
| - BasePerfTest.setUp(self) |
| + Args: |
| + times: e.g. [page1_iter1, page1_iter2, ..., page2_iter1, page2_iter2, ...] |
| + num_pages: the number of pages |
| + iterations: the number of times for each page |
| + Yields: |
| + times for one page: [page1_iter1, page1_iter2, ...] |
| + """ |
| + expected_num_times = num_pages * iterations |
| + self.assertEqual( |
| + expected_num_times, len(times), |
| + msg=('len(times) != num_pages * iterations: %s != %s * %s, times=%s' % |
| + (len(times), num_pages, iterations, times))) |
| - def _RunPageCyclerTest(self, dirname, iterations, description): |
| - """Runs the specified PageCycler test. |
| + next_time = iter(times).next |
| + for _ in range(num_pages): |
| + yield [next_time() for _ in range(iterations)] |
| - The final score that is calculated is a geometric mean of the |
| - arithmetic means of each site's load time, and we drop the upper |
| - 20% of the times for each site so they don't skew the mean. |
| - The Geometric mean is used for the final score because the time |
| - range for any given site may be very different, and we don't want |
| - slower sites to weight more heavily than others. |
| + def _TrimTimes(self, times, percent): |
| + """Return a new list with |percent| number of times trimmed for each page. |
| - Args: |
| - dirname: The directory containing the page cycler test. |
| - iterations: How many times to run through the set of pages. |
| - description: A string description for the particular test being run. |
| + Removes the largest and smallest values. |
| """ |
| - self._PreReadDir(os.path.join(self.PC_PATH, dirname)) |
| + iterations = len(times) |
| + times = sorted(times) |
| + logging.debug('Before trimming %d: %s' % num_to_drop, times) |
| + |
| + num_to_trim = int(iterations * float(percent) / 100.0) |
| + a = num_to_drop / 2 |
| + b = iterations - (num_to_trim / 2 + num_to_trim % 2) |
| + trimmed_times = times[a:b] |
| + logging.debug('After trimming: %s', trimmed_times) |
| + return trimmed_times |
| + |
| + def _GetArithmeticMean(self, values): |
|
tonyg
2012/05/18 15:37:54
These math helper functions shouldn't be named as
slamm_google
2012/05/18 23:59:28
I do not see any collection of functions in this d
|
| + """Return the arithmetic mean of |values|.""" |
| + return sum(values) / len(values) |
| + |
| + def _GetGeometricMean(self, values): |
| + """Return the geometric mean of |values|.""" |
| + return reduce(lambda x, y: x * y, values) ** (1.0 / len(values)) |
| + |
| + def _ComputeFinalResult(self, times, num_pages, iterations): |
| + """The final score that is calculated is a geometric mean of the |
| + arithmetic means of each page's load time, and we drop the |
| + upper/lower 20% of the times for each page so they don't skew the |
| + mean. The geometric mean is used for the final score because the |
| + time range for any given site may be very different, and we don't |
| + want slower sites to weight more heavily than others. |
| + """ |
| + page_means = [ |
| + self._GetArithmeticMean(self._TrimTimes(times, percent=20)) |
| + for times in self._IteratePageTimes(times, num_pages, iterations)] |
| + return self._GetGeometricMean(page_means) |
| - url = self.GetFileURLForDataPath(os.path.join(self.PC_PATH, dirname), |
| - 'start.html') |
| + def _StartUrl(self, test_name, iterations): |
| + """Return the URL to used to start the test. |
| - self.NavigateToURL('%s?auto=1&iterations=%d' % (url, iterations)) |
| + Subclasses must implement this. |
| + """ |
| + raise NotImplemented |
| - # Check cookies for "__pc_done=1" to know the test is over. |
| - def IsTestDone(): |
| - cookies = self.GetCookie(pyauto.GURL(url)) # Window 0, tab 0. |
| - return '__pc_done=1' in cookies |
| + def _RunPageCyclerTest(self, name, description): |
| + """Runs the specified PageCycler test. |
|
tonyg
2012/05/18 15:37:54
nit: extra line break
slamm_google
2012/05/18 23:59:28
Done.
|
| - self.assertTrue( |
| - self.WaitUntil(IsTestDone, timeout=(60 * iterations), retry_sleep=1), |
| - msg='Timed out waiting for page cycler test to complete.') |
| - # Collect the results from the cookies. |
| - site_to_time_list = {} |
| - cookies = self.GetCookie(pyauto.GURL(url)) # Window 0, tab 0. |
| - site_list = '' |
| - time_list = '' |
| - for cookie in cookies.split(';'): |
| - if '__pc_pages' in cookie: |
| - site_list = cookie[cookie.find('=') + 1:] |
| - elif '__pc_timings' in cookie: |
| - time_list = cookie[cookie.find('=') + 1:] |
| - self.assertTrue(site_list and time_list, |
| - msg='Could not find test results in cookies: %s' % cookies) |
| - site_list = site_list.split(',') |
| - time_list = time_list.split(',') |
| - self.assertEqual(iterations, len(time_list) / len(site_list), |
| - msg='Iteration count %d does not match with site/timing ' |
| - 'lists: %s and %s' % (iterations, site_list, time_list)) |
| - for site_index, site in enumerate(site_list): |
| - site_to_time_list[site] = [] |
| - for iteration_index in xrange(iterations): |
| - site_to_time_list[site].append( |
| - float(time_list[iteration_index * len(site_list) + site_index])) |
| - |
| - site_times = [] |
| - for site, time_list in site_to_time_list.iteritems(): |
| - sorted_times = sorted(time_list) |
| - num_to_drop = int(len(sorted_times) * 0.2) |
| - logging.debug('Before dropping %d: ' % num_to_drop) |
| - logging.debug(sorted_times) |
| - if num_to_drop: |
| - sorted_times = sorted_times[:-num_to_drop] |
| - logging.debug('After dropping:') |
| - logging.debug(sorted_times) |
| - # Do an arithmetic mean of the load times for a given page. |
| - mean_time = sum(sorted_times) / len(sorted_times) |
| - logging.debug('Mean time is: ' + str(mean_time)) |
| - site_times.append(mean_time) |
| - |
| - logging.info('site times = %s' % site_times) |
| - # Compute a geometric mean over the averages for each site. |
| - final_result = reduce(lambda x, y: x * y, |
| - site_times) ** (1.0/ len(site_times)) |
| + Args: |
| + name: the page cycler test name (corresponds to a directory or test file) |
| + description: a string description for the test |
| + """ |
| + iterations = self._num_iterations |
| + start_url = self._StartUrl(name, iterations) |
| + self.NavigateToURL(start_url) |
| + self._WaitUntilDone(start_url, iterations) |
| + pages, times = self._CollectPagesAndTimes(start_url) |
| + final_result = self._ComputeFinalResult(times, len(pages), iterations) |
| logging.info('%s page cycler final result: %f' % |
| (description, final_result)) |
| self._OutputPerfGraphValue(description + '_PageCycler', final_result, |
| 'milliseconds', graph_name='PageCycler') |
| + |
| +class PageCyclerTest(BasePageCyclerTest): |
| + """Tests to run various page cyclers.""" |
| + |
| + def _PreReadDataDir(self, subdir): |
| + """This recursively reads all of the files in a given url directory. |
| + |
| + The intent is to get them into memory before they are used by the benchmark. |
| + |
| + Args: |
| + subdir: a subdirectory of the page cycler data directory. |
| + """ |
| + def _PreReadDir(dirname, names): |
| + for rfile in names: |
| + with open(os.path.join(dirname, rfile)) as fp: |
| + fp.read() |
| + for root, dirs, files in os.walk(self.DataPath(subdir)): |
| + _PreReadDir(root, files) |
| + |
| + def _StartUrl(self, test_name, iterations): |
| + return self.GetFileURLForDataPath( |
| + self.DataPath(test_name), |
| + 'start.html?auto=1&iterations=%d' % iterations) |
| + |
| + def _RunPageCyclerTest(self, dirname, description): |
| + """Runs the specified PageCycler test. |
| + |
| + Args: |
| + dirname: directory containing the page cycler test |
| + description: a string description for the test |
| + """ |
| + self._PreReadDataDir('common') |
| + self._PreReadDataDir(dirname) |
| + super(PageCyclerTest, self)._RunPageCyclerTest(dirname, description) |
| + |
| def testMoreJSFile(self): |
| - self._RunPageCyclerTest('morejs', self._num_iterations, 'MoreJSFile') |
| + self._RunPageCyclerTest('morejs', 'MoreJSFile') |
| def testAlexaFile(self): |
| - self._RunPageCyclerTest('alexa_us', self._num_iterations, 'Alexa_usFile') |
| + self._RunPageCyclerTest('alexa_us', 'Alexa_usFile') |
| def testBloatFile(self): |
| - self._RunPageCyclerTest('bloat', self._num_iterations, 'BloatFile') |
| + self._RunPageCyclerTest('bloat', 'BloatFile') |
| def testDHTMLFile(self): |
| - self._RunPageCyclerTest('dhtml', self._num_iterations, 'DhtmlFile') |
| + self._RunPageCyclerTest('dhtml', 'DhtmlFile') |
| def testIntl1File(self): |
| - self._RunPageCyclerTest('intl1', self._num_iterations, 'Intl1File') |
| + self._RunPageCyclerTest('intl1', 'Intl1File') |
| def testIntl2File(self): |
| - self._RunPageCyclerTest('intl2', self._num_iterations, 'Intl2File') |
| + self._RunPageCyclerTest('intl2', 'Intl2File') |
| def testMozFile(self): |
| - self._RunPageCyclerTest('moz', self._num_iterations, 'MozFile') |
| + self._RunPageCyclerTest('moz', 'MozFile') |
| def testMoz2File(self): |
| - self._RunPageCyclerTest('moz2', self._num_iterations, 'Moz2File') |
| + self._RunPageCyclerTest('moz2', 'Moz2File') |
| + |
| + |
| +class WebPageReplayPageCyclerTest(BasePageCyclerTest): |
|
tonyg
2012/05/18 15:37:54
Please add a class level docstring.
slamm_google
2012/05/18 23:59:28
Done.
|
| + _TEST_EXE_NAME = 'perf_py' |
| + _IS_DNS_FORWARDED = False |
| + |
| + def ChromiumPaths(self, test_name=None): |
| + return webpagereplay.ChromiumPaths( |
| + TEST_EXE_NAME=self._TEST_EXE_NAME, TEST_NAME=test_name) |
| + |
| + def ExtraChromeFlags(self): |
| + """Ensures Chrome is launched with custom flags. |
| + |
| + Returns: |
| + A list of extra flags to pass to Chrome when it is launched. |
| + """ |
| + flags = super(WebPageReplayPageCyclerTest, self).ExtraChromeFlags() |
| + chromium_paths = self.ChromiumPaths() |
| + webpagereplay.ChromeFlags( |
| + flags, |
| + extension_path=chromium_paths['extension'], |
| + is_dns_forwarded=self._IS_DNS_FORWARDED) |
| + return flags |
| + |
| + def _StartUrl(self, test_name, iterations): |
| + chromium_paths = self.ChromiumPaths(test_name=test_name) |
| + return chromium_paths.GetStartUrl(iterations, use_auto=True) |
| + |
| + def _RunPageCyclerTest(self, name, description): |
| + """Runs the specified PageCycler test. |
| + |
| + Args: |
| + name: name for archive and config files: |name|.wpr and |name|.js. |
| + description: a string description for the test |
| + """ |
| + chromium_paths = self.ChromiumPaths(test_name=name) |
| + replay_options = [] |
| + if not self._IS_DNS_FORWARDED: |
| + replay_options.append('--no-dns_forwarding') |
| + is_record_mode = False # TODO(slamm): get from environment variable? |
| + if is_record_mode: |
| + replay_options.append('--record') |
| + with webpagereplay.ReplayServer( |
| + chromium_paths['replay'], |
| + chromium_paths.GetArchivePath(), |
| + chromium_paths['logs'], |
| + replay_options): |
| + super_self = super(WebPageReplayPageCyclerTest, self) |
| + super_self._RunPageCyclerTest(name, description) |
| + |
|
tonyg
2012/05/18 15:37:54
nit: extra line break
slamm_google
2012/05/18 23:59:28
Done.
|
| + |
| + def testWpr2012Q2(self): |
| + self._RunPageCyclerTest('2012Q2', 'Wpr2012Q2') |
|
dennis_jeffrey
2012/05/18 00:28:41
Is this expected to be able to run on ChromeOS?
B
slamm_google
2012/05/18 23:59:29
Thanks for the heads-up.
Does this python code r
Sonny
2012/05/19 00:50:10
It runs directly on ChromeOS, and as Dennis mentio
|
| class MemoryTest(BasePerfTest): |