Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(7382)

Unified Diff: chrome/test/functional/perf.py

Issue 10411011: Add Web Page Replay enabled page cycler tests to pyauto. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Review fixes & first working version. Created 8 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « no previous file | chrome/test/functional/webpagereplay.py » ('j') | chrome/test/functional/webpagereplay.py » ('J')
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: chrome/test/functional/perf.py
diff --git a/chrome/test/functional/perf.py b/chrome/test/functional/perf.py
index fd299e0ef73bfd9faba408db35ca1c8712315797..c8b811c5a7e259e25d7d2477edb64013f358580e 100755
--- a/chrome/test/functional/perf.py
+++ b/chrome/test/functional/perf.py
@@ -48,9 +48,24 @@ import simplejson # Must be imported after pyauto; located in third_party.
from netflix import NetflixTestHelper
import pyauto_utils
import test_utils
+import webpagereplay
from youtube import YoutubeTestHelper
+def Mean(values):
+ """Return the arithmetic mean of |values|."""
+ return sum(values) / float(len(values))
tonyg 2012/05/19 00:46:36 Needs a base case for 0-length |values|.
slamm_google 2012/05/21 22:22:06 Done. I also added a check for 'None' values.
+
+
+def GeometricMean(values):
+ """Return the geometric mean of |values|."""
+ if not values or [x for x in values if x < 0.0]:
+ return None
+ if 0.0 in values:
+ return 0.0
+ return math.exp(Mean([math.log(x) for x in values]))
+
+
class BasePerfTest(pyauto.PyUITest):
"""Base class for performance tests."""
@@ -1733,12 +1748,20 @@ class LiveGamePerfTest(BasePerfTest):
'AngryBirds', 'angry_birds')
-class PageCyclerTest(BasePerfTest):
- """Tests to run various page cyclers."""
+class BasePageCyclerTest(BasePerfTest):
+  """Base class for page cycler tests.
+
+ Derived classes must implement StartUrl().
+ """
+ TRIM_PERCENT = 20
# Page Cycler lives in src/data/page_cycler rather than src/chrome/test/data
- PC_PATH = os.path.join(BasePerfTest.DataDir(), os.pardir, os.pardir,
- os.pardir, 'data', 'page_cycler')
+ DATA_PATH = os.path.join(BasePerfTest.DataDir(), os.pardir, os.pardir,
+ os.pardir, 'data', 'page_cycler')
+
+ @classmethod
+ def DataPath(cls, subdir):
+ return os.path.join(cls.DATA_PATH, subdir)
def ExtraChromeFlags(self):
"""Ensures Chrome is launched with custom flags.
@@ -1750,129 +1773,265 @@ class PageCyclerTest(BasePerfTest):
# The first two are needed for the test.
# The plugins argument is to prevent bad scores due to pop-ups from
# running an old version of something (like Flash).
- return (super(PageCyclerTest, self).ExtraChromeFlags() +
+ return (super(BasePageCyclerTest, self).ExtraChromeFlags() +
['--js-flags="--expose_gc"',
'--enable-file-cookies',
'--allow-outdated-plugins'])
- def _PreReadDir(self, dir):
- """This recursively reads all of the files in a given url directory.
+ def WaitUntilDone(self, url, iterations):
+ """Check cookies for "__pc_done=1" to know the test is over."""
+ def IsDone():
+ cookies = self.GetCookie(pyauto.GURL(url)) # window 0, tab 0
+ return '__pc_done=1' in cookies
+ self.assertTrue(
+ self.WaitUntil(IsDone, timeout=(60 * iterations), retry_sleep=1),
+ msg='Timed out waiting for page cycler test to complete.')
- The intent is to get them into memory before they are used by the benchmark.
- """
- def _PreReadDir(dirname, names):
- for rfile in names:
- with open(os.path.join(dirname, rfile)) as fp:
- fp.read()
+ def CollectPagesAndTimes(self, url):
+ """Collect the results from the cookies."""
+ pages, times = None, None
+ cookies = self.GetCookie(pyauto.GURL(url)) # window 0, tab 0
+ for cookie in cookies.split(';'):
+ if '__pc_pages' in cookie:
+ pages_str = cookie.split('=', 1)[1]
+ pages = pages_str.split(',')
+ elif '__pc_timings' in cookie:
+ times_str = cookie.split('=', 1)[1]
+ times = [float(t) for t in times_str.split(',')]
+ self.assertTrue(pages and times,
+ msg='Unable to find test results in cookies: %s' % cookies)
+ return pages, times
- for root, dirs, files in os.walk(os.path.dirname(dir)):
- _PreReadDir(root, files)
+ def IteratePageTimes(self, times, num_pages, iterations):
+ """Regroup the times by the page.
- def setUp(self):
- self._PreReadDir(os.path.join(self.PC_PATH, 'common'))
- BasePerfTest.setUp(self)
+ Args:
+ times: e.g. [page1_iter1, page1_iter2, ..., page2_iter1, page2_iter2, ...]
+ num_pages: the number of pages
+ iterations: the number of times for each page
+ Yields:
+ times for one page: [page1_iter1, page1_iter2, ...]
+ """
+ expected_num_times = num_pages * iterations
+ self.assertEqual(
+ expected_num_times, len(times),
+ msg=('len(times) != num_pages * iterations: %s != %s * %s, times=%s' %
+ (len(times), num_pages, iterations, times)))
- def _RunPageCyclerTest(self, dirname, iterations, description):
- """Runs the specified PageCycler test.
+ next_time = iter(times).next
+ for _ in range(num_pages):
+ yield [next_time() for _ in range(iterations)]
- The final score that is calculated is a geometric mean of the
- arithmetic means of each site's load time, and we drop the upper
- 20% of the times for each site so they don't skew the mean.
- The Geometric mean is used for the final score because the time
- range for any given site may be very different, and we don't want
- slower sites to weight more heavily than others.
+ def TrimTimes(self, times, percent):
+ """Return a new list with |percent| number of times trimmed for each page.
- Args:
- dirname: The directory containing the page cycler test.
- iterations: How many times to run through the set of pages.
- description: A string description for the particular test being run.
+ Removes the largest and smallest values.
"""
- self._PreReadDir(os.path.join(self.PC_PATH, dirname))
-
- url = self.GetFileURLForDataPath(os.path.join(self.PC_PATH, dirname),
- 'start.html')
+ iterations = len(times)
+ times = sorted(times)
+ num_to_trim = int(iterations * float(percent) / 100.0)
+ logging.debug('Before trimming %d: %s' % (num_to_trim, times))
+ a = num_to_trim / 2
+ b = iterations - (num_to_trim / 2 + num_to_trim % 2)
+ trimmed_times = times[a:b]
+ logging.debug('After trimming: %s', trimmed_times)
+ return trimmed_times
+
+ def ComputeFinalResult(self, times, num_pages, iterations):
+ """The final score that is calculated is a geometric mean of the
+ arithmetic means of each page's load time, and we drop the
+ upper/lower 20% of the times for each page so they don't skew the
+ mean. The geometric mean is used for the final score because the
+ time range for any given site may be very different, and we don't
+ want slower sites to weight more heavily than others.
+ """
+ page_means = [
+ Mean(self.TrimTimes(times, percent=self.TRIM_PERCENT))
+ for times in self.IteratePageTimes(times, num_pages, iterations)]
+ return GeometricMean(page_means)
- self.NavigateToURL('%s?auto=1&iterations=%d' % (url, iterations))
+ def StartUrl(self, test_name, iterations):
+    """Return the URL used to start the test.
- # Check cookies for "__pc_done=1" to know the test is over.
- def IsTestDone():
- cookies = self.GetCookie(pyauto.GURL(url)) # Window 0, tab 0.
- return '__pc_done=1' in cookies
+    Derived classes must implement this.
+ """
+    raise NotImplementedError
- self.assertTrue(
- self.WaitUntil(IsTestDone, timeout=(60 * iterations), retry_sleep=1),
- msg='Timed out waiting for page cycler test to complete.')
+ def RunPageCyclerTest(self, name, description):
+ """Runs the specified PageCycler test.
- # Collect the results from the cookies.
- site_to_time_list = {}
- cookies = self.GetCookie(pyauto.GURL(url)) # Window 0, tab 0.
- site_list = ''
- time_list = ''
- for cookie in cookies.split(';'):
- if '__pc_pages' in cookie:
- site_list = cookie[cookie.find('=') + 1:]
- elif '__pc_timings' in cookie:
- time_list = cookie[cookie.find('=') + 1:]
- self.assertTrue(site_list and time_list,
- msg='Could not find test results in cookies: %s' % cookies)
- site_list = site_list.split(',')
- time_list = time_list.split(',')
- self.assertEqual(iterations, len(time_list) / len(site_list),
- msg='Iteration count %d does not match with site/timing '
- 'lists: %s and %s' % (iterations, site_list, time_list))
- for site_index, site in enumerate(site_list):
- site_to_time_list[site] = []
- for iteration_index in xrange(iterations):
- site_to_time_list[site].append(
- float(time_list[iteration_index * len(site_list) + site_index]))
-
- site_times = []
- for site, time_list in site_to_time_list.iteritems():
- sorted_times = sorted(time_list)
- num_to_drop = int(len(sorted_times) * 0.2)
- logging.debug('Before dropping %d: ' % num_to_drop)
- logging.debug(sorted_times)
- if num_to_drop:
- sorted_times = sorted_times[:-num_to_drop]
- logging.debug('After dropping:')
- logging.debug(sorted_times)
- # Do an arithmetic mean of the load times for a given page.
- mean_time = sum(sorted_times) / len(sorted_times)
- logging.debug('Mean time is: ' + str(mean_time))
- site_times.append(mean_time)
-
- logging.info('site times = %s' % site_times)
- # Compute a geometric mean over the averages for each site.
- final_result = reduce(lambda x, y: x * y,
- site_times) ** (1.0/ len(site_times))
+ Args:
+ name: the page cycler test name (corresponds to a directory or test file)
+ description: a string description for the test
+ """
+ iterations = self._num_iterations
+ start_url = self.StartUrl(name, iterations)
+ self.NavigateToURL(start_url)
+ self.WaitUntilDone(start_url, iterations)
+ pages, times = self.CollectPagesAndTimes(start_url)
+ final_result = self.ComputeFinalResult(times, len(pages), iterations)
logging.info('%s page cycler final result: %f' %
(description, final_result))
self._OutputPerfGraphValue(description + '_PageCycler', final_result,
'milliseconds', graph_name='PageCycler')
+
+class PageCyclerTest(BasePageCyclerTest):
+ """Tests to run various page cyclers."""
+
+ def _PreReadDataDir(self, subdir):
+ """This recursively reads all of the files in a given url directory.
+
+ The intent is to get them into memory before they are used by the benchmark.
+
+ Args:
+ subdir: a subdirectory of the page cycler data directory.
+ """
+ def _PreReadDir(dirname, names):
+ for rfile in names:
+ with open(os.path.join(dirname, rfile)) as fp:
+ fp.read()
+ for root, dirs, files in os.walk(self.DataPath(subdir)):
+ _PreReadDir(root, files)
+
+ def StartUrl(self, test_name, iterations):
+ return self.GetFileURLForDataPath(
+ self.DataPath(test_name),
+ 'start.html?auto=1&iterations=%d' % iterations)
+
+ def RunPageCyclerTest(self, dirname, description):
+ """Runs the specified PageCycler test.
+
+ Args:
+ dirname: directory containing the page cycler test
+ description: a string description for the test
+ """
+ self._PreReadDataDir('common')
+ self._PreReadDataDir(dirname)
+ super(PageCyclerTest, self).RunPageCyclerTest(dirname, description)
+
def testMoreJSFile(self):
- self._RunPageCyclerTest('morejs', self._num_iterations, 'MoreJSFile')
+ self.RunPageCyclerTest('morejs', 'MoreJSFile')
def testAlexaFile(self):
- self._RunPageCyclerTest('alexa_us', self._num_iterations, 'Alexa_usFile')
+ self.RunPageCyclerTest('alexa_us', 'Alexa_usFile')
def testBloatFile(self):
- self._RunPageCyclerTest('bloat', self._num_iterations, 'BloatFile')
+ self.RunPageCyclerTest('bloat', 'BloatFile')
def testDHTMLFile(self):
- self._RunPageCyclerTest('dhtml', self._num_iterations, 'DhtmlFile')
+ self.RunPageCyclerTest('dhtml', 'DhtmlFile')
def testIntl1File(self):
- self._RunPageCyclerTest('intl1', self._num_iterations, 'Intl1File')
+ self.RunPageCyclerTest('intl1', 'Intl1File')
def testIntl2File(self):
- self._RunPageCyclerTest('intl2', self._num_iterations, 'Intl2File')
+ self.RunPageCyclerTest('intl2', 'Intl2File')
def testMozFile(self):
- self._RunPageCyclerTest('moz', self._num_iterations, 'MozFile')
+ self.RunPageCyclerTest('moz', 'MozFile')
def testMoz2File(self):
- self._RunPageCyclerTest('moz2', self._num_iterations, 'Moz2File')
+ self.RunPageCyclerTest('moz2', 'Moz2File')
+
+
+class WebPageReplayPageCyclerTest(BasePageCyclerTest):
+ """Tests to run Web Page Replay backed page cycler tests.
+
+ Web Page Replay is a proxy that can record and "replay" web pages with
+ simulated network characteristics -- without having to edit the pages
+ by hand. With WPR, tests can use "real" web content, and catch
+ performance issues that may result from introducing network delays and
+ bandwidth throttling.
+ """
+ _PATHS = {
+ 'archives': 'src/data/page_cycler/webpagereplay',
+ 'wpr': 'src/data/page_cycler/webpagereplay/{test_name}.wpr',
+ 'wpr_pub': 'src/tools/page_cycler/webpagereplay/tests/{test_name}.wpr',
+ 'start_page': 'src/tools/page_cycler/webpagereplay/start.html',
+ 'extension': 'src/tools/page_cycler/webpagereplay/extension',
+ 'replay': 'src/third_party/webpagereplay',
+ 'logs': 'src/webpagereplay_logs',
+ }
+
+ _BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
+ '..', '..', '..', '..'))
+ _IS_DNS_FORWARDED = False
+
+ @classmethod
+ def _Path(cls, key, **kwargs):
+ """Provide paths for page cycler tests with Web Page Replay."""
+ chromium_path = cls._PATHS[key].format(**kwargs)
+ return os.path.join(cls._BASE_DIR, *chromium_path.split('/'))
+
+ @classmethod
+ def _ArchivePath(cls, test_name):
+ has_private_archives = os.path.exists(cls._Path('archives'))
+ key = 'wpr' if has_private_archives else 'wpr_pub'
+ return cls._Path(key, test_name=test_name)
+
+ def ExtraChromeFlags(self):
+ """Ensures Chrome is launched with custom flags.
+
+ Returns:
+ A list of extra flags to pass to Chrome when it is launched.
+ """
+ flags = super(WebPageReplayPageCyclerTest, self).ExtraChromeFlags()
+ flags.append('--load-extension=%s' % self._Path('extension'))
+ if not self._IS_DNS_FORWARDED:
+ flags.append('--host-resolver-rules=MAP * %s' % webpagereplay.REPLAY_HOST)
+ flags.extend([
+ '--testing-fixed-http-port=%s' % webpagereplay.HTTP_PORT,
+ '--testing-fixed-https-port=%s' % webpagereplay.HTTPS_PORT,
+ '--log-level=0',
+ ])
+ extra_flags = [
+ '--disable-background-networking',
+ '--enable-experimental-extension-apis',
+ '--enable-logging',
+ '--enable-stats-table',
+ '--enable-benchmarking',
+ '--ignore-certificate-errors',
+ '--metrics-recording-only',
+ '--activate-on-launch',
+ '--no-first-run',
+ '--no-proxy-server',
+ ]
+ flags.extend(f for f in extra_flags if f not in flags)
+ return flags
+
+ def StartUrl(self, test_name, iterations):
+ start_url = 'file://%s?test=%s' % (self._Path('start_page'), test_name)
+ if iterations is not None:
+ start_url += '&iterations=%d' % iterations
+ use_auto = True # TODO(slamm): get from env?
tonyg 2012/05/19 00:46:36 Sounds reasonable to get from env. Other things in
slamm_google 2012/05/21 22:22:06 Done. The code looks for 'PC_NO_AUTO' and 'PC_REC
+ if use_auto:
+ start_url += '&auto=1'
+ return start_url
+
+ def RunPageCyclerTest(self, test_name, description):
+ """Runs the specified PageCycler test.
+
+ Args:
+ test_name: name for archive (.wpr) and config (.js) files.
+ description: a string description for the test
+ """
+ replay_options = []
+ if not self._IS_DNS_FORWARDED:
+ replay_options.append('--no-dns_forwarding')
+ is_record_mode = False # TODO(slamm): get from environment variable?
+ if is_record_mode:
+ replay_options.append('--record')
+ with webpagereplay.ReplayServer(
+ self._Path('replay'),
+ self._ArchivePath(test_name),
+ self._Path('logs'),
+ replay_options):
+ super_self = super(WebPageReplayPageCyclerTest, self)
+ super_self.RunPageCyclerTest(test_name, description)
+
+ def test2012Q2(self):
+ self.RunPageCyclerTest('2012Q2', '2012Q2')
class MemoryTest(BasePerfTest):
« no previous file with comments | « no previous file | chrome/test/functional/webpagereplay.py » ('j') | chrome/test/functional/webpagereplay.py » ('J')

Powered by Google App Engine
This is Rietveld 408576698