OLD | NEW |
---|---|
1 #!/usr/bin/python | 1 #!/usr/bin/python |
2 | 2 |
3 # Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file | 3 # Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file |
4 # for details. All rights reserved. Use of this source code is governed by a | 4 # for details. All rights reserved. Use of this source code is governed by a |
5 # BSD-style license that can be found in the LICENSE file. | 5 # BSD-style license that can be found in the LICENSE file. |
6 | 6 |
7 import datetime | 7 import datetime |
8 import optparse | 8 import optparse |
9 import os | 9 import os |
10 from os.path import dirname, abspath | 10 from os.path import dirname, abspath |
(...skipping 454 matching lines...) | |
465 'benchmark_page_%s.html' % version) | 465 'benchmark_page_%s.html' % version) |
466 self.test.test_runner.run_cmd( | 466 self.test.test_runner.run_cmd( |
467 ['python', os.path.join('tools', 'testing', 'run_selenium.py'), | 467 ['python', os.path.join('tools', 'testing', 'run_selenium.py'), |
468 '--out', file_path, '--browser', browser, | 468 '--out', file_path, '--browser', browser, |
469 '--timeout', '600', '--mode', 'perf'], self.test.trace_file, | 469 '--timeout', '600', '--mode', 'perf'], self.test.trace_file, |
470 append=True) | 470 append=True) |
471 | 471 |
472 class CommonBrowserFileProcessor(Processor): | 472 class CommonBrowserFileProcessor(Processor): |
473 def process_file(self, afile): | 473 def process_file(self, afile): |
474 """Comb through the html to find the performance results. | 474 """Comb through the html to find the performance results. |
475 Returns: True if we successfullly posted our data to storage.""" | 475 Returns: True if we successfullly posted our data to storage and/or we can |
vsm 2012/04/27 22:43:06
Extra l in successfully :-)
Emily Fortuna 2012/04/27 23:00:58
Done.
| |
| 476 delete the trace file.""" |
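Editor's note: the revised docstring ties process_file's return value to trace-file cleanup: True means the results were uploaded, or the run was incomplete and the file can be discarded anyway. A minimal sketch of how a driver could honor that flag follows; the helper name and loop are assumptions for illustration only, not part of this CL (the real caller lives in an elided portion of the file).

import os

def cleanup_trace_files(processor, folder):
    # Hypothetical helper: delete each trace file only when process_file
    # reports that it is safe to do so.
    for afile in os.listdir(folder):
        if processor.process_file(afile):
            # True: data was posted to storage, or the run never completed,
            # so the trace file is no longer needed.
            os.remove(os.path.join(folder, afile))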
476 os.chdir(os.path.join(DART_INSTALL_LOCATION, 'tools', | 477 os.chdir(os.path.join(DART_INSTALL_LOCATION, 'tools', |
477 'testing', 'perf_testing')) | 478 'testing', 'perf_testing')) |
478 if self.test.test_runner.no_upload: | 479 if self.test.test_runner.no_upload: |
479 return | 480 return |
480 parts = afile.split('-') | 481 parts = afile.split('-') |
481 browser = parts[2] | 482 browser = parts[2] |
482 version = parts[3] | 483 version = parts[3] |
483 f = open(os.path.join(self.test.result_folder_name, afile)) | 484 f = open(os.path.join(self.test.result_folder_name, afile)) |
484 lines = f.readlines() | 485 lines = f.readlines() |
485 line = '' | 486 line = '' |
486 i = 0 | 487 i = 0 |
487 revision_num = 0 | 488 revision_num = 0 |
488 while '<div id="results">' not in line and i < len(lines): | 489 while '<div id="results">' not in line and i < len(lines): |
489 if 'Revision' in line: | 490 if 'Revision' in line: |
490 revision_num = int(line.split()[1].strip('"')) | 491 revision_num = int(line.split()[1].strip('"')) |
491 line = lines[i] | 492 line = lines[i] |
492 i += 1 | 493 i += 1 |
493 | 494 |
494 if i >= len(lines) or revision_num == 0: | 495 if i >= len(lines) or revision_num == 0: |
495 # Then this run did not complete. Ignore this tracefile. | 496 # Then this run did not complete. Ignore this tracefile. |
496 return | 497 return True |
497 | 498 |
498 line = lines[i] | 499 line = lines[i] |
499 i += 1 | 500 i += 1 |
500 results = [] | 501 results = [] |
501 if line.find('<br>') > -1: | 502 if line.find('<br>') > -1: |
502 results = line.split('<br>') | 503 results = line.split('<br>') |
503 else: | 504 else: |
504 results = line.split('<br />') | 505 results = line.split('<br />') |
505 upload_success = True | 506 upload_success = True |
506 for result in results: | 507 for result in results: |
(...skipping 288 matching lines...) | |
795 result = re.match(result_pattern, line.strip()) | 796 result = re.match(result_pattern, line.strip()) |
796 if result: | 797 if result: |
797 variant = result.group(1) | 798 variant = result.group(1) |
798 metric = result.group(2) | 799 metric = result.group(2) |
799 num = result.group(3) | 800 num = result.group(3) |
800 if num.find('.') == -1: | 801 if num.find('.') == -1: |
801 num = int(num) | 802 num = int(num) |
802 else: | 803 else: |
803 num = float(num) | 804 num = float(num) |
804 upload_success = upload_success and self.report_results( | 805 upload_success = upload_success and self.report_results( |
805 metric, num, 'browser', variant, revision_num, self.CODE_SIZE) | 806 metric, num, 'commandline', variant, revision_num, self.CODE_SIZE) |
806 | 807 |
807 f.close() | 808 f.close() |
808 return upload_success | 809 return upload_success |
809 | 810 |
810 | 811 |
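Editor's note: the loop above pulls three groups out of result_pattern (variant, metric, numeric value) and now reports them under 'commandline' instead of 'browser'. The pattern itself is defined in a skipped portion of this file, so the snippet below uses an invented format purely to illustrate the three-group parse and the int/float branch; it is not the actual pattern.

import re

# Hypothetical pattern; assumed format "<variant>-<metric>: <number>".
result_pattern = r'(\w+)-(\w+): ([0-9.]+)'

line = 'dart2js-CompileTime: 42.5'
result = re.match(result_pattern, line.strip())
if result:
    variant = result.group(1)
    metric = result.group(2)
    num = result.group(3)
    # Mirrors the branch above: integral strings become int, otherwise float.
    num = float(num) if '.' in num else int(num)
    print('%s %s %s' % (variant, metric, num))  # dart2js CompileTime 42.5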
811 class CompileTimeAndSizeTest(Test): | 812 class CompileTimeAndSizeTest(Test): |
812 """Run tests to determine how long frogc takes to compile, and the compiled | 813 """Run tests to determine how long frogc takes to compile, and the compiled |
813 file output size of some benchmarking files.""" | 814 file output size of some benchmarking files.""" |
814 def __init__(self, test_runner): | 815 def __init__(self, test_runner): |
815 """Reference to the test_runner object that notifies us when to begin | 816 """Reference to the test_runner object that notifies us when to begin |
(...skipping 22 matching lines...) | |
838 os.chdir('frog') | 839 os.chdir('frog') |
839 self.test.trace_file = os.path.join( | 840 self.test.trace_file = os.path.join( |
840 '..', 'tools', 'testing', 'perf_testing', | 841 '..', 'tools', 'testing', 'perf_testing', |
841 self.test.result_folder_name, | 842 self.test.result_folder_name, |
842 self.test.result_folder_name + self.test.cur_time) | 843 self.test.result_folder_name + self.test.cur_time) |
843 | 844 |
844 self.add_svn_revision_to_trace(self.test.trace_file) | 845 self.add_svn_revision_to_trace(self.test.trace_file) |
845 | 846 |
846 self.test.test_runner.run_cmd( | 847 self.test.test_runner.run_cmd( |
847 [self.test.dart_vm, 'frogc.dart', '--out=swarm-result', | 848 [self.test.dart_vm, 'frogc.dart', '--out=swarm-result', |
848 os.path.join('..', 'samples', 'swarm', | 849 os.path.join('..', 'internal', 'golem', 'benchmarks-dart2js', 'tests', |
849 'swarm.dart')]) | 850 'samples-r6461', 'swarm', 'swarm.dart')]) |
850 | 851 |
851 swarm_size = 0 | 852 swarm_size = 0 |
852 try: | 853 try: |
853 swarm_size = os.path.getsize('swarm-result') | 854 swarm_size = os.path.getsize('swarm-result') |
854 except OSError: | 855 except OSError: |
855 pass #If compilation failed, continue on running other tests. | 856 pass #If compilation failed, continue on running other tests. |
856 | 857 |
857 self.test.test_runner.run_cmd( | 858 self.test.test_runner.run_cmd( |
858 [self.test.dart_vm, 'frogc.dart', '--out=total-result', | 859 [self.test.dart_vm, 'frogc.dart', '--out=total-result', |
859 os.path.join('..', 'samples', 'total', | 860 os.path.join('..', 'internal', 'golem', 'benchmarks-dart2js', 'tests', |
860 'client', 'Total.dart')]) | 861 'samples-r6461', 'total', 'client', 'Total.dart')]) |
vsm 2012/04/27 22:43:06
I take it you're freezing to a particular version
Emily Fortuna 2012/04/27 23:00:58
This is what AAR is doing for measuring dart2js on
| |
861 total_size = 0 | 862 total_size = 0 |
862 try: | 863 try: |
863 total_size = os.path.getsize('total-result') | 864 total_size = os.path.getsize('total-result') |
864 except OSError: | 865 except OSError: |
865 pass #If compilation failed, continue on running other tests. | 866 pass #If compilation failed, continue on running other tests. |
866 | 867 |
867 self.test.test_runner.run_cmd( | 868 self.test.test_runner.run_cmd( |
868 ['echo', '%d Generated checked swarm size' % swarm_size], | 869 ['echo', '%d Generated checked swarm size' % swarm_size], |
869 self.test.trace_file, append=True) | 870 self.test.trace_file, append=True) |
870 | 871 |
(...skipping 63 matching lines...) | |
934 while True: | 935 while True: |
935 if runner.has_new_code(): | 936 if runner.has_new_code(): |
936 runner.run_test_sequence() | 937 runner.run_test_sequence() |
937 else: | 938 else: |
938 time.sleep(200) | 939 time.sleep(200) |
939 else: | 940 else: |
940 runner.run_test_sequence() | 941 runner.run_test_sequence() |
941 | 942 |
942 if __name__ == '__main__': | 943 if __name__ == '__main__': |
943 main() | 944 main() |