/
perf_test_driver.py
125 lines (100 loc) · 4.24 KB
/
perf_test_driver.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
#!/usr/bin/env python
# ===--- perf_test_driver.py ----------------------------------------------===//
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ===----------------------------------------------------------------------===//
import os
import sys
import subprocess
import multiprocessing
import re
class Result(object):
    """Outcome of a single performance test, with expected-failure support.

    status follows process-exit convention: falsy (0) means the test
    succeeded, truthy (non-zero) means it failed. xfail_list is a list of
    regex patterns; a test whose name matches any of them is expected to
    fail, which inverts how its status is reported (XFAIL/XPASS).
    """

    def __init__(self, name, status, output, xfail_list):
        self.name = name
        self.status = status
        self.output = output
        # Expected-to-fail iff the test name matches any xfail pattern.
        self.is_xfailed = any(
            re.match(x, self.name) is not None for x in xfail_list)

    def is_failure(self):
        """Return True if this result should fail the overall run.

        A plain FAIL fails the run, and so does XPASS (a test that was
        expected to fail but unexpectedly passed).
        """
        return self.get_result() in ['FAIL', 'XPASS']

    def get_result(self):
        """Classify as 'PASS', 'FAIL', 'XFAIL', or 'XPASS'."""
        if self.is_xfailed:
            # For an expected failure, a non-zero status is the expected
            # outcome (XFAIL); success is the surprising one (XPASS).
            return 'XFAIL' if self.status else 'XPASS'
        return 'FAIL' if self.status else 'PASS'

    def get_name(self):
        return self.name

    def get_status(self):
        return self.status

    def get_output(self):
        return self.output

    def get_data(self):
        # BUG FIX: no code path ever assigns self.data, so this previously
        # always raised AttributeError. Return None when no extra data has
        # been attached to this result.
        return getattr(self, 'data', None)

    def merge_in_extra_data(self, d):
        """Rather than modifying the extra data dict, just return it as a no-op"""
        return d

    def print_data(self, max_test_len):
        # Left-align the name in a column wide enough for the longest name
        # plus padding, then print the classification.
        fmt = '{:<%d}{:}' % (max_test_len + 5)
        print(fmt.format(self.get_name(), self.get_result()))
def _unwrap_self(args):
return type(args[0]).process_input(*args)
# Default optimization levels: a PerfTests_<level> binary is expected to
# exist in the binary directory for each entry.
PerfTestDriver_OptLevels = ['Onone', 'O', 'Ounchecked']
class PerfTestDriver(object):
    """Drives a set of PerfTests_<opt-level> benchmark binaries.

    For each optimization level, the matching binary is asked for its test
    list, each test is turned into an input dict, run (optionally in
    parallel), and the per-test Results are aggregated. Subclasses must
    override prepare_input and process_input.
    """

    def __init__(self, binary_dir, xfail_list, enable_parallel=False,
                 opt_levels=None):
        # FIX: use a None sentinel instead of the mutable module-level list
        # as a default argument.
        if opt_levels is None:
            opt_levels = PerfTestDriver_OptLevels
        # One (binary path, opt level) pair per optimization level.
        self.targets = [
            (os.path.join(binary_dir, 'PerfTests_%s' % o), o)
            for o in opt_levels]
        self.xfail_list = xfail_list
        self.enable_parallel = enable_parallel
        self.data = None

    def print_data_header(self, max_test_len):
        # Column layout must match Result.print_data.
        fmt = '{:<%d}{:}' % (max_test_len + 5)
        print(fmt.format('Name', 'Result'))

    def prepare_input(self, name, opt_level):
        """Return a dict of extra input for one test; must be overridden.

        NOTE(review): the call site below passes only `name`, not
        `opt_level` — subclasses apparently override with a single-argument
        signature; confirm before relying on this signature.
        """
        raise RuntimeError("Abstract method")

    def process_input(self, data):
        """Run one prepared test dict and return a Result; must be overridden."""
        raise RuntimeError("Abstract method")

    def run_for_opt_level(self, binary, opt_level, test_filter):
        """Run all (optionally filtered) tests in one binary and aggregate.

        Returns a dict with keys 'result' (list of Results), 'has_failure'
        (bool), 'max_test_len' (int), and 'extra_data' (dict).
        """
        print("testing driver at path: %s" % binary)
        # BUG FIX: on Python 3, check_output returns bytes; decode so the
        # names can be matched against str regex patterns below. The first
        # two whitespace-separated tokens are header noise from --list.
        listing = subprocess.check_output([binary, "--list"])
        if not isinstance(listing, str):
            listing = listing.decode('utf-8')
        names = [n.strip() for n in listing.split()[2:]]
        if test_filter:
            regex = re.compile(test_filter)
            names = [n for n in names if regex.match(n)]

        def prepare_input_wrapper(name):
            x = {'opt': opt_level, 'path': binary, 'test_name': name}
            x.update(self.prepare_input(name))
            return x

        prepared_input = [prepare_input_wrapper(n) for n in names]
        if self.enable_parallel:
            p = multiprocessing.Pool()
            # Pool.map needs a picklable module-level function, hence the
            # (self, input) tuples routed through _unwrap_self.
            z = zip([self] * len(prepared_input), prepared_input)
            results = p.map(_unwrap_self, z)
        else:
            # list() so the aggregation below behaves the same on Python 2
            # (list) and Python 3 (iterator).
            results = list(map(self.process_input, prepared_input))

        # BUG FIX: this previously used reduce() as a builtin, which no
        # longer exists on Python 3; an explicit fold is equivalent.
        acc = {'result': [], 'has_failure': False,
               'max_test_len': 0, 'extra_data': {}}
        for r in results:
            acc['result'].append(r)
            acc['has_failure'] = acc['has_failure'] or r.is_failure()
            acc['max_test_len'] = max(acc['max_test_len'], len(r.get_name()))
            acc['extra_data'] = r.merge_in_extra_data(acc['extra_data'])
        return acc

    def print_data(self, data, max_test_len):
        """Print the header followed by one line per Result in each target."""
        print("Results:")
        self.print_data_header(max_test_len)
        for d in data:
            for r in d['result']:
                r.print_data(max_test_len)

    def run(self, test_filter=None):
        """Run every target binary; print results; return True iff all passed."""
        self.data = [
            self.run_for_opt_level(binary, opt_level, test_filter)
            for binary, opt_level in self.targets]
        # BUG FIX: these were reduce(max, ...) calls, which fail on Python 3
        # where reduce is not a builtin; max()/any() are equivalent here.
        max_test_len = max(d['max_test_len'] for d in self.data)
        has_failure = any(d['has_failure'] for d in self.data)
        self.print_data(self.data, max_test_len)
        return not has_failure