#
# ----------------------------------------------------------------------------------------------------
#
# Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# ----------------------------------------------------------------------------------------------------
#
from __future__ import print_function
import mx
import os
import re
import tempfile
import fnmatch
from argparse import ArgumentParser, RawDescriptionHelpFormatter, ArgumentTypeError, Action
from os.path import exists, join, basename, isdir, isabs
def _newest(path):
    """
    Computes the newest timestamp for `path`. This is simply the timestamp of
    `path` if it is a file. Otherwise, it's the timestamp of the newest ``.class``
    file under the `path` directory (ignoring module-info and package-info classes).
    """
    ts = mx.TimeStampFile(path)
    if isdir(path):
        for dirpath, _, filenames in os.walk(path):
            for filename in filenames:
                if filename.endswith('.class') and filename not in ('module-info.class', 'package-info.class'):
                    file_ts = mx.TimeStampFile(join(dirpath, filename))
                    if file_ts.isNewerThan(ts):
                        ts = file_ts
    return ts
def _read_cached_testclasses(cachesDir, jar, jdk):
    """
    Reads the cached list of test classes in `jar`.

    :param str cachesDir: directory containing files with cached test lists
    :param JDKConfig jdk: the JDK for which the cached list of classes must be found
    :return: the cached list of test classes in `jar` or None if the cache doesn't
             exist or is out of date
    """
    jdkVersion = '.jdk' + str(jdk.javaCompliance)
    cache = join(cachesDir, basename(jar) + jdkVersion + '.testclasses')
    jar_ts = _newest(jar)
    if exists(cache) and mx.TimeStampFile(cache).isNewerThan(jar_ts):
        # Only use the cached result if the source jar is older than the cache file
        try:
            with open(cache) as fp:
                return [line.strip() for line in fp.readlines()]
        except IOError as e:
            mx.warn('Error reading from ' + cache + ': ' + str(e))
    return None
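
# Illustrative note (not from the original source; the jar name is hypothetical): with the
# naming scheme above, a distribution jar 'FOO.jar' tested against a JDK of compliance 17
# uses cache files such as
#
#   <output_root>/<buildCacheDir>/FOO.jar.jdk17.testclasses
#   <output_root>/<buildCacheDir>/FOO.jar.jdk17.excludedclasses
#
# where <buildCacheDir> is whatever directory name the callers below pass in.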
def _write_cached_testclasses(cachesDir, jar, jdk, testclasses, excludedclasses):
    """
    Writes `testclasses` to a cache file specific to `jar`.

    :param str cachesDir: directory containing files with cached test lists
    :param JDKConfig jdk: the JDK for which the cached list of classes must be written
    :param list testclasses: a list of test class names
    :param list excludedclasses: a list of excluded class names
    """
    jdkVersion = '.jdk' + str(jdk.javaCompliance)
    cache = join(cachesDir, basename(jar) + jdkVersion + '.testclasses')
    exclusions = join(cachesDir, basename(jar) + jdkVersion + '.excludedclasses')
    try:
        with open(cache, 'w') as fp:
            for classname in testclasses:
                print(classname, file=fp)
        with open(exclusions, 'w') as fp:
            if excludedclasses:
                mx.warn('Unsupported class files listed in ' + exclusions)
            for classname in excludedclasses:
                print(classname[1:], file=fp)
    except IOError as e:
        mx.warn('Error writing to ' + cache + ': ' + str(e))
def _find_classes_by_annotated_methods(annotations, dists, buildCacheDir, jdk=None):
    if len(dists) == 0:
        return {}

    candidates = {}

    # Create map from jar file to the binary suite distribution defining it
    jarsToDists = {d.classpath_repr(): d for d in dists}

    primarySuite = mx.primary_suite()
    cachesDir = None
    jarsToParse = []
    if primarySuite and primarySuite != mx._mx_suite:
        cachesDir = mx.ensure_dir_exists(join(primarySuite.get_output_root(), buildCacheDir))
        for d in dists:
            jar = d.classpath_repr()
            testclasses = _read_cached_testclasses(cachesDir, jar, jdk if jdk else mx.get_jdk())
            if testclasses is not None:
                for classname in testclasses:
                    candidates[classname] = jarsToDists[jar]
            else:
                jarsToParse.append(jar)

    if jarsToParse:
        # Ensure Java support class is built
        mx.build(['--no-daemon', '--dependencies', 'com.oracle.mxtool.junit'])

        cp = mx.classpath(['com.oracle.mxtool.junit'] + list(jarsToDists.values()), jdk=jdk)
        out = mx.LinesOutputCapture()

        mx.run_java(['-cp', cp, 'com.oracle.mxtool.junit.FindClassesByAnnotatedMethods'] + annotations + jarsToParse, out=out, addDefaultArgs=False)

        for line in out.lines:
            parts = line.split(os.pathsep)
            jar = parts[0]
            reportedclasses = parts[1:] if len(parts) > 1 else []
            testclasses = [c for c in reportedclasses if not c.startswith("!")]
            excludedclasses = [c for c in reportedclasses if c.startswith("!")]
            multi_jar_classes = [c for c in testclasses if c.startswith("META-INF")]
            if multi_jar_classes:
                mx.abort('mx unittest does not support multi-release jar\n  ' + '\n  '.join([jar + ':'] + multi_jar_classes))
            if cachesDir:
                _write_cached_testclasses(cachesDir, jar, jdk if jdk else mx.get_jdk(), testclasses, excludedclasses)
            for classname in testclasses:
                candidates[classname] = jarsToDists[jar]
    return candidates
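
# Illustrative sketch of the output parsed above (the class names are hypothetical): each
# line produced by FindClassesByAnnotatedMethods is the jar path followed by the reported
# classes, all separated by the platform path separator, with unsupported classes marked
# by a leading '!'. On Linux (os.pathsep == ':') a line might look like:
#
#   /path/to/FOO.jar:com.example.FooTest:com.example.BarTest:!com.example.UnsupportedTest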
def _find_classes_with_annotations(p, pkgRoot, annotations, includeInnerClasses=False):
    """
    Scan the sources of project 'p' for Java source files containing a line starting with
    any element of 'annotations' (ignoring preceding whitespace) and return the list of fully
    qualified class names for each Java source file matched.
    """
    matches = lambda line: len([a for a in annotations if line == a or line.startswith(a + '(')]) != 0
    return p.find_classes_with_matching_source_line(pkgRoot, matches, includeInnerClasses)
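
# Illustrative note (not from the original source): the `matches` predicate above accepts a
# line that is exactly an annotation or an annotation immediately followed by '(', so both
# '@Test' and '@Test(timeout = 1000)' match while a longer name such as '@Testify' does not.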
class _VMLauncher(object):
    """
    Launcher to run the unit tests. See `set_vm_launcher` for descriptions of the parameters.
    """
    def __init__(self, name, launcher, jdk):
        self.name = name
        self.launcher = launcher
        self._jdk = jdk

    def jdk(self):
        if callable(self._jdk):
            return self._jdk()
        return self._jdk
_global_ignore_globs = []

def add_global_ignore_glob(ignore):
    _global_ignore_globs.append(re.compile(fnmatch.translate(ignore)))
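
# Illustrative usage (the pattern is hypothetical): another mx extension can exclude test
# classes from every `mx unittest` invocation with an fnmatch-style glob, e.g.:
#
#   add_global_ignore_glob('*LongRunningTest')
#
# The compiled globs are applied together with any --blacklist entries in _run_tests below.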
def _filter_test_candidates(candidates, tests):
    classes = []
    if len(tests) == 0:
        classes = list(candidates.keys())
        depsContainingTests = set(candidates.values())
    else:
        depsContainingTests = set()
        found = False
        if len(tests) == 1 and '#' in tests[0]:
            words = tests[0].split('#')
            if len(words) != 2:
                mx.abort("Method specification is class#method: " + tests[0])
            t, method = words
            for c, p in candidates.items():
                # prefer exact matches first
                if t == c:
                    found = True
                    classes.append(c)
                    depsContainingTests.add(p)
            if not found:
                for c, p in candidates.items():
                    if t in c:
                        found = True
                        classes.append(c)
                        depsContainingTests.add(p)
            if not found:
                mx.abort('no tests matched by substring: ' + t + ' (did you forget to run "mx build"?)')
            elif len(classes) != 1:
                mx.abort('More than one test matches substring {0} {1}'.format(t, classes))
            classes = [c + "#" + method for c in classes]
        else:
            for t in tests:
                if '#' in t:
                    mx.abort('Method specifications can only be used in a single test: ' + t)
                for c, p in candidates.items():
                    if t in c:
                        found = True
                        classes.append(c)
                        depsContainingTests.add(p)
            if not found:
                mx.abort('no tests matched by substring: ' + t + ' (did you forget to run "mx build"?)')
    return classes, depsContainingTests
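
# Illustrative sketch of the filtering above (the candidate names are hypothetical): given
# candidates {'com.example.FooTest': distFoo, 'com.example.BarTest': distBar},
#
#   _filter_test_candidates(candidates, [])                   -> both classes
#   _filter_test_candidates(candidates, ['FooTest'])          -> ['com.example.FooTest']
#   _filter_test_candidates(candidates, ['FooTest#testAdd'])  -> ['com.example.FooTest#testAdd']
#
# A '#method' specification is only accepted when a single filter is given and it matches
# exactly one class.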
def find_test_candidates(annotations, suite, jdk, buildCacheDir='unittest'):
    """
    Finds all classes containing methods annotated with one of the supplied annotations.
    To speed up subsequent invocations, the results are cached in the `buildCacheDir`.

    :param list annotations: a list of annotations to recognize test methods, e.g. ['@Test', '@Parameters']
    :param suite: the mx suite in which to look for test classes. If no suite is given, the primary suite is used.
    :param JDKConfig jdk: the JDK for which the list of classes must be found
    :param str buildCacheDir: a path relative to the mx suite output root that is used to store the cache files.
    :return: a dictionary associating each found test class with the distribution it occurs in.
    """
    assert not isabs(buildCacheDir), "buildCacheDir must be a relative path"
    compat_suite = suite if suite else mx.primary_suite()
    if suite != mx._mx_suite and compat_suite.getMxCompatibility().useDistsForUnittest():
        jar_distributions = [d for d in mx.sorted_dists() if
                             d.isJARDistribution() and exists(d.classpath_repr(resolve=False)) and (
                                 not suite or d.suite == suite)]
        # find a corresponding distribution for each test
        candidates = _find_classes_by_annotated_methods(annotations, jar_distributions, buildCacheDir, jdk)
    else:
        binary_deps = [d for d in mx.dependencies(opt_limit_to_suite=True) if d.isJARDistribution() and
                       isinstance(d.suite, mx.BinarySuite) and (not suite or suite == d.suite)]
        candidates = _find_classes_by_annotated_methods(annotations, binary_deps, buildCacheDir, jdk)
        for p in mx.projects(opt_limit_to_suite=True):
            if not p.isJavaProject():
                continue
            if suite and not p.suite == suite:
                continue
            if jdk.javaCompliance < p.javaCompliance:
                continue
            for c in _find_classes_with_annotations(p, None, annotations):
                candidates[c] = p
    return candidates
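
# Illustrative usage (the suite name is hypothetical): a suite's own gate or test commands
# can reuse this lookup directly, e.g.:
#
#   candidates = find_test_candidates(['@Test'], mx.suite('mysuite'), mx.get_jdk())
#   for classname, dist_or_project in candidates.items():
#       mx.log(classname + ' -> ' + str(dist_or_project))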
def _run_tests(args, harness, vmLauncher, annotations, testfile, blacklist, whitelist, regex, suite):
    vmArgs, tests = mx.extract_VM_args(args)
    for t in tests:
        if t.startswith('-'):
            mx.abort('VM option ' + t + ' must precede ' + tests[0])

    candidates = find_test_candidates(annotations, suite, vmLauncher.jdk())
    classes, depsContainingTests = _filter_test_candidates(candidates, tests)

    full_ignorelist = blacklist or []
    full_ignorelist += _global_ignore_globs
    if full_ignorelist:
        classes = [c for c in classes if not any((glob.match(c) for glob in full_ignorelist))]

    if whitelist:
        classes = [c for c in classes if any((glob.match(c) for glob in whitelist))]

    if regex:
        classes = [c for c in classes if re.search(regex, c)]

    if len(classes) != 0:
        f_testfile = open(testfile, 'w')
        for c in sorted(classes):
            f_testfile.write(c + '\n')
        f_testfile.close()
        harness(depsContainingTests, vmLauncher, vmArgs)
#: A `_VMLauncher` object.
_vm_launcher = None

_config_participants = []

def set_vm_launcher(name, launcher, jdk=None):
    """
    Sets the details for running the JVM given the components of a unit test command line.

    :param str name: a descriptive name for the launcher
    :param callable launcher: a function taking 3 positional arguments; the first is a list of the
           arguments to go before the main class name on the JVM command line, the second is the
           name of the main class to run and the third is a list of the arguments to go after
           the main class name on the JVM command line
    :param jdk: a `JDKConfig` or no-arg callable that produces a `JDKConfig` object denoting
           the JDK containing the JVM that will be executed. This is used to resolve JDK
           relative dependencies (such as `JdkLibrary`s) needed by the unit tests.
    """
    global _vm_launcher
    assert _vm_launcher is None, 'cannot override unit test VM launcher ' + _vm_launcher.name
    if jdk is None:
        def _jdk():
            jdk = mx.get_jdk()
            mx.warn('Assuming ' + str(jdk) + ' contains JVM executed by ' + name)
            return jdk
        # fall back to the default JDK, resolved lazily, when no JDK was supplied
        jdk = _jdk
    _vm_launcher = _VMLauncher(name, launcher, jdk)
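
# Illustrative usage (hypothetical launcher; `_my_vm_home` is a made-up helper): a VM suite
# can route unit tests through its own java launcher instead of the default `mx.run_java`:
#
#   def _my_vm_launcher(vmArgs, mainClass, mainClassArgs):
#       mx.run([join(_my_vm_home(), 'bin', 'java')] + vmArgs + [mainClass] + mainClassArgs)
#   set_vm_launcher('my VM launcher', _my_vm_launcher, jdk=mx.get_jdk)
#
# The `jdk` argument may be a JDKConfig or, as here, a no-arg callable producing one.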
def add_config_participant(p):
    _config_participants.append(p)

def get_config_participants_copy():
    """
    Returns a copy of the currently registered config participants. A config participant is a
    function that receives a tuple (VM arguments, main class, main class arguments) and returns
    a potentially modified tuple. Other parts of mx can register config participants with
    :func:`add_config_participant` to change the way unit tests are invoked.
    """
    return _config_participants.copy()
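
# Illustrative sketch (the participant is hypothetical): a config participant receives and
# returns the (vmArgs, mainClass, mainClassArgs) tuple, so a suite could, for example, pin
# the file encoding for every unit test run:
#
#   def _force_utf8(config):
#       vmArgs, mainClass, mainClassArgs = config
#       return (['-Dfile.encoding=UTF-8'] + vmArgs, mainClass, mainClassArgs)
#   add_config_participant(_force_utf8)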
_extra_unittest_arguments = []

def add_unittest_argument(*args, **kwargs):
    """
    Adds an argument declaration to the ArgumentParser used by the unittest method.
    The argument can be processed via a custom `action=MyAction`.
    """
    _extra_unittest_arguments.append((args, kwargs))
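
# Illustrative usage (the option name is hypothetical): a suite can extend the
# `mx unittest` command line; unless a custom action consumes the value, it reaches
# `_unittest` through `**extra_args`:
#
#   add_unittest_argument('--my-report-dir', default=None, metavar='<dir>',
#                         help='hypothetical directory for extra reports')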
def _unittest(args, annotations, junit_args, prefixCp="", blacklist=None, whitelist=None, regex=None, suite=None, **extra_args):
    testfile = os.environ.get('MX_TESTFILE', None)
    if testfile is None:
        (fd, testfile) = tempfile.mkstemp(".testclasses", "mxtool")
        os.close(fd)

    mainClass = 'com.oracle.mxtool.junit.MxJUnitWrapper'
    mx.build(['--no-daemon', '--dependencies', 'JUNIT_TOOL'])
    coreCp = mx.classpath(['JUNIT_TOOL'])

    def harness(unittestDeps, vmLauncher, vmArgs):
        prefixArgs = ['-esa', '-ea']
        if '-JUnitGCAfterTest' in junit_args:
            prefixArgs.append('-XX:-DisableExplicitGC')
        with open(testfile) as fp:
            testclasses = [l.rstrip() for l in fp.readlines()]

        jdk = vmLauncher.jdk()
        vmArgs += mx.get_runtime_jvm_args(unittestDeps, cp_prefix=prefixCp + coreCp, jdk=jdk)

        # suppress menubar and dock when running on Mac
        vmArgs = prefixArgs + ['-Djava.awt.headless=true'] + vmArgs

        if jdk.javaCompliance > '1.8':
            # This is required to access jdk.internal.module.Modules for supporting
            # the @AddExports annotation.
            vmArgs = vmArgs + ['--add-exports=java.base/jdk.internal.module=ALL-UNNAMED']

        # Execute JUnit directly when only one test is being run. This simplifies
        # replaying the VM execution in a native debugger (e.g., gdb).
        mainClassArgs = junit_args + (testclasses if len(testclasses) == 1 else ['@' + mx._cygpathU2W(testfile)])

        config = (vmArgs, mainClass, mainClassArgs)
        for p in _config_participants:
            config = p(config)
        vmLauncher.launcher(*config)

    vmLauncher = _vm_launcher
    if vmLauncher is None:
        jdk = mx.get_jdk()
        def _run_vm(vmArgs, mainClass, mainClassArgs):
            mx.run_java(vmArgs + [mainClass] + mainClassArgs, jdk=jdk)
        vmLauncher = _VMLauncher('default VM launcher', _run_vm, jdk)

    try:
        _run_tests(args, harness, vmLauncher, annotations, testfile, blacklist, whitelist, regex, mx.suite(suite) if suite else None)
    finally:
        if os.environ.get('MX_TESTFILE') is None:
            os.remove(testfile)
unittestHelpSuffix = """
    To avoid conflicts with VM options, '--' can be used as a delimiter.

    If test filters are supplied, only tests whose fully qualified name
    includes a filter as a substring are run. For example:

       mx unittest -Dgraal.Dump= -Dgraal.MethodFilter=BC_aload -Dgraal.PrintCFG=true BC_aload

    will run all JUnit test classes that contain 'BC_aload' in their
    fully qualified name and will pass these options to the VM:

       -Dgraal.Dump= -Dgraal.MethodFilter=BC_aload -Dgraal.PrintCFG=true

    To get around command line length limitations on some OSes, the
    JUnit class names to be executed are written to a file that a
    custom JUnit wrapper reads and passes onto JUnit proper. The
    MX_TESTFILE environment variable can be set to specify a
    file which will not be deleted once the unit tests are done
    (unlike the temporary file otherwise used).

    As with all other commands, using the global '-v' option before the
    'unittest' command will cause mx to show the complete command line
    it uses to run the VM.

    The grammar for the argument to the --open-packages option is:

      export_spec  ::= module_spec "/" package_spec [ "=" target_spec [ "," target_spec ]* ]
      module_spec  ::= name [ "*" ]
      package_spec ::= name [ "*" ] | "*"
      target_spec  ::= "ALL-UNNAMED" | name [ "*" ] | "*"

    Examples:

      Export and open all packages in jdk.internal.vm.compiler to all unnamed modules:

        --open-packages jdk.internal.vm.compiler/*=ALL-UNNAMED

      Equivalent shorthand form:

        --open-packages jdk.internal.vm.compiler/*

      Export and open all packages starting with "org.graalvm.compiler." in all
      modules whose name starts with "jdk.internal.vm." to all unnamed modules:

        --open-packages jdk.internal.vm.*/org.graalvm.compiler.*

      Same as above but also export and open to the org.graalvm.enterprise module:

        --open-packages jdk.internal.vm.*/org.graalvm.compiler.*=ALL-UNNAMED,org.graalvm.enterprise
"""
def is_strictly_positive(value):
    try:
        if int(value) <= 0:
            raise ArgumentTypeError("%s must be greater than 0" % value)
    except ValueError:
        raise ArgumentTypeError("%s: integer greater than 0 expected" % value)
    return int(value)
def parse_split_args(args, parser, delimiter):
    """
    Parses arguments potentially separated by a delimiter, e.g., ``[mx options] -- [VM options]``.
    The options preceding the delimiter are parsed with the supplied argparse parser. That is,
    the parser is expected to recognize all arguments before the delimiter. If no delimiter is
    present, the supplied parser does not have to recognize all arguments.

    :param list args: a list of arguments to parse.
    :param parser: an argparse parser that can recognize any argument preceding the delimiter.
    :param str delimiter: the delimiter separating the arguments.
    :return: a tuple consisting of the args not parsed by the parser (i.e. either the args after
             the delimiter or the args not recognized by the parser if no delimiter is present)
             and the parsed argument structure returned by argparse.
    """
    args_before_delimiter = []
    delimiter_found = False

    # copy the input so we don't modify the parameter
    args_copy = list(args)

    # check for delimiter
    while len(args_copy) > 0:
        arg = args_copy.pop(0)
        if arg == delimiter:
            delimiter_found = True
            break
        args_before_delimiter.append(arg)

    if delimiter_found:
        # all arguments before '--' must be recognized
        parsed_args = parser.parse_args(args_before_delimiter)
    else:
        # parse all known arguments
        parsed_args, args_copy = parser.parse_known_args(args_before_delimiter)
    return args_copy, parsed_args
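
# Illustrative sketch of the split behavior (the argument values are hypothetical):
#
#   parse_split_args(['--verbose', '--', '-Xmx1g', 'FooTest'], parser, '--')
#       -> (['-Xmx1g', 'FooTest'], <namespace for '--verbose'>)
#   parse_split_args(['--verbose', 'FooTest'], parser, '--')
#       -> (['FooTest'], <namespace for '--verbose'>)   # no delimiter: unknown args fall through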
@mx.command(suite_name="mx",
            command_name='unittest',
            usage_msg='[unittest options] [--] [VM options] [filters...]',
            doc_function=lambda: unittestHelpSuffix,
            auto_add=False)
def unittest(args):
    """run the JUnit tests"""

    junit_arg_actions = []
    junit_args = []

    class MxJUnitWrapperArg(Action):
        def __init__(self, **kwargs):
            kwargs['required'] = False
            Action.__init__(self, **kwargs)
            junit_arg_actions.append(self)

        def __call__(self, parser, namespace, values, option_string=None):
            junit_args.append('-' + self.dest)
            junit_args.append(values)

    class MxJUnitWrapperBoolArg(Action):
        def __init__(self, **kwargs):
            kwargs['required'] = False
            kwargs['nargs'] = 0
            Action.__init__(self, **kwargs)
            junit_arg_actions.append(self)

        def __call__(self, parser, namespace, values, option_string=None):
            junit_args.append('-' + self.dest)

    parser = ArgumentParser(prog='mx unittest',
                            description='run the JUnit tests',
                            formatter_class=RawDescriptionHelpFormatter,
                            epilog=unittestHelpSuffix,
                            )
    parser.add_argument('--blacklist', help='run all testcases not specified in <file>', metavar='<file>')
    parser.add_argument('--whitelist', help='run testcases specified in <file> only', metavar='<file>')
    parser.add_argument('--verbose', help='enable verbose JUnit output', dest='JUnitVerbose', action=MxJUnitWrapperBoolArg)
    parser.add_argument('--very-verbose', help='enable very verbose JUnit output', dest='JUnitVeryVerbose', action=MxJUnitWrapperBoolArg)
    parser.add_argument('--max-class-failures', help='stop after N test classes that have a failure (default is no limit)', metavar='<N>', dest='JUnitMaxClassFailures', action=MxJUnitWrapperArg)
    parser.add_argument('--fail-fast', help='alias for --max-class-failures=1', dest='JUnitFailFast', action=MxJUnitWrapperBoolArg)
    parser.add_argument('--enable-timing', help='enable JUnit test timing (requires --verbose/--very-verbose)', dest='JUnitEnableTiming', action=MxJUnitWrapperBoolArg)
    parser.add_argument('--regex', help='run only testcases matching a regular expression', metavar='<regex>')
    parser.add_argument('--color', help='enable color output', dest='JUnitColor', action=MxJUnitWrapperBoolArg)
    parser.add_argument('--gc-after-test', help='force a GC after each test', dest='JUnitGCAfterTest', action=MxJUnitWrapperBoolArg)
    parser.add_argument('--record-results', help='record test class results to passed.txt and failed.txt', dest='JUnitRecordResults', action=MxJUnitWrapperBoolArg)
    record_help_msg_shared = 'If <file> is "-", the tests will be printed to stdout. ' + \
                             'In contrast to --record-results this prints not only the test class but also the test method.'
    parser.add_argument('--print-passed', metavar="<file>", dest='JUnitRecordPassed', action=MxJUnitWrapperArg, help='record passed test class results in <file>. ' + record_help_msg_shared)
    parser.add_argument('--print-failed', metavar="<file>", dest='JUnitRecordFailed', action=MxJUnitWrapperArg, help='record failed test class results in <file>. ' + record_help_msg_shared)
    parser.add_argument('--suite', help='run only the unit tests in <suite>', metavar='<suite>')
    parser.add_argument('--repeat', help='run each test <n> times', dest='JUnitRepeat', action=MxJUnitWrapperArg, type=is_strictly_positive, metavar='<n>')
    parser.add_argument('--open-packages', dest='JUnitOpenPackages', action=MxJUnitWrapperArg, metavar='<module>/<package>[=<target-module>(,<target-module>)*]',
                        help="export and open packages regardless of module declarations (see more detail and examples below)")
    eagerStacktrace = parser.add_mutually_exclusive_group()
    eagerStacktrace.add_argument('--eager-stacktrace', action='store_const', const=True, dest='eager_stacktrace', help='print test errors as they occur (default)')
    eagerStacktrace.add_argument('--no-eager-stacktrace', action='store_const', const=False, dest='eager_stacktrace', help='print test errors after all tests have run')

    for a, k in _extra_unittest_arguments:
        parser.add_argument(*a, **k)

    # Augment usage text to mention test filters and options passed to the VM
    usage = parser.format_usage().strip()
    if usage.startswith('usage: '):
        usage = usage[len('usage: '):]
    parser.usage = usage + ' [test filters...] [VM options...]'

    args, parsed_args = parse_split_args(args, parser, "--")

    # Remove junit_args values from parsed_args
    for a in junit_arg_actions:
        parsed_args.__dict__.pop(a.dest)

    if parsed_args.whitelist:
        try:
            with open(parsed_args.whitelist) as fp:
                parsed_args.whitelist = [re.compile(fnmatch.translate(l.rstrip())) for l in fp.readlines() if not l.startswith('#')]
        except IOError:
            mx.log('warning: could not read whitelist: ' + parsed_args.whitelist)
    if parsed_args.blacklist:
        try:
            with open(parsed_args.blacklist) as fp:
                parsed_args.blacklist = [re.compile(fnmatch.translate(l.rstrip())) for l in fp.readlines() if not l.startswith('#')]
        except IOError:
            mx.log('warning: could not read blacklist: ' + parsed_args.blacklist)

    if parsed_args.eager_stacktrace is None:
        junit_args.append('-JUnitEagerStackTrace')
    parsed_args.__dict__.pop('eager_stacktrace')

    _unittest(args, ['@Test', '@Parameters'], junit_args, **parsed_args.__dict__)