# Source: [gecko.git] testing/mach_commands.py
# blob 23e12ae4647f61c01efeb5565222fb23ea01147b
# (Commit context: "Bumping gaia.json for 2 gaia revision(s) a=gaia-bump")
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import print_function, unicode_literals

import os
import sys

from mach.decorators import (
    CommandArgument,
    CommandProvider,
    Command,
)

from mozbuild.base import MachCommandBase
# Message printed when none of the command-line arguments can be resolved
# to a suite, alias, or test file/directory.
UNKNOWN_TEST = '''
I was unable to find tests in the argument(s) given.

You need to specify a test directory, filename, test suite name, or
abbreviation.

It's possible my little brain doesn't know about the type of test you are
trying to execute. If you suspect this, please request support by filing
a bug at
https://bugzilla.mozilla.org/enter_bug.cgi?product=Testing&component=General.
'''.strip()

# Message printed for a resolved test whose flavor has no runner wired up.
# The flavor name is interpolated via the %s placeholder.
UNKNOWN_FLAVOR = '''
I know you are trying to run a %s test. Unfortunately, I can't run those
tests yet. Sorry!
'''.strip()

# Chunking parameters used below when expanding the numbered
# mochitest-N suites into TEST_SUITES.
MOCHITEST_CHUNK_BY_DIR = 4
MOCHITEST_TOTAL_CHUNKS = 5
# Maps a suite name to metadata on how to invoke it. Each entry carries
# either a 'mach_command' (dispatched with 'kwargs') or a 'make_target'
# (run through make), plus optional command-line 'aliases'.
TEST_SUITES = {
    'cppunittest': {
        'aliases': ('Cpp', 'cpp'),
        'mach_command': 'cppunittest',
        'kwargs': {'test_file': None},
    },
    'crashtest': {
        'aliases': ('C', 'Rc', 'RC', 'rc'),
        'mach_command': 'crashtest',
        'kwargs': {'test_file': None},
    },
    'crashtest-ipc': {
        'aliases': ('Cipc', 'cipc'),
        'mach_command': 'crashtest-ipc',
        'kwargs': {'test_file': None},
    },
    'jetpack': {
        'aliases': ('J',),
        'mach_command': 'jetpack-test',
        'kwargs': {},
    },
    'check-spidermonkey': {
        'aliases': ('Sm', 'sm'),
        'mach_command': 'check-spidermonkey',
        'kwargs': {'valgrind': False},
    },
    'mochitest-a11y': {
        'mach_command': 'mochitest',
        'kwargs': {'flavor': 'a11y', 'test_paths': None},
    },
    'mochitest-browser': {
        'aliases': ('bc', 'BC', 'Bc'),
        'mach_command': 'mochitest-browser',
        'kwargs': {'flavor': 'browser-chrome', 'test_paths': None},
    },
    'mochitest-chrome': {
        'mach_command': 'mochitest',
        'kwargs': {'flavor': 'chrome', 'test_paths': None},
    },
    'mochitest-devtools': {
        'aliases': ('dt', 'DT', 'Dt'),
        'mach_command': 'mochitest-browser',
        'kwargs': {'subsuite': 'devtools', 'test_paths': None},
    },
    'mochitest-ipcplugins': {
        'make_target': 'mochitest-ipcplugins',
    },
    'mochitest-plain': {
        'mach_command': 'mochitest',
        'kwargs': {'flavor': 'mochitest', 'test_paths': None},
    },
    'reftest': {
        'aliases': ('RR', 'rr', 'Rr'),
        'mach_command': 'reftest',
        'kwargs': {'test_file': None},
    },
    'reftest-ipc': {
        'aliases': ('Ripc',),
        'mach_command': 'reftest-ipc',
        'kwargs': {'test_file': None},
    },
    'valgrind': {
        'aliases': ('V', 'v'),
        'mach_command': 'valgrind-test',
        'kwargs': {},
    },
    'xpcshell': {
        'aliases': ('X', 'x'),
        'mach_command': 'xpcshell-test',
        'kwargs': {'test_file': 'all'},
    },
}
# Maps test flavors (as reported by the test resolver) to metadata on how
# to run tests of that flavor. An empty entry marks a flavor that is known
# but not yet runnable through |mach test|.
TEST_FLAVORS = {
    'a11y': {
        'mach_command': 'mochitest',
        'kwargs': {'flavor': 'a11y', 'test_paths': []},
    },
    'browser-chrome': {
        'mach_command': 'mochitest',
        'kwargs': {'flavor': 'browser-chrome', 'test_paths': []},
    },
    # Key was misspelled 'chrashtest'; the resolver reports the flavor as
    # 'crashtest', so the misspelled entry could never match and crashtests
    # always fell through to the UNKNOWN_FLAVOR error path.
    'crashtest': {},
    'chrome': {
        'mach_command': 'mochitest',
        'kwargs': {'flavor': 'chrome', 'test_paths': []},
    },
    'mochitest': {
        'mach_command': 'mochitest',
        'kwargs': {'flavor': 'mochitest', 'test_paths': []},
    },
    'reftest': {},
    'steeplechase': {},
    'webapprt-chrome': {
        'mach_command': 'mochitest',
        'kwargs': {'flavor': 'webapprt-chrome', 'test_paths': []},
    },
    'xpcshell': {
        'mach_command': 'xpcshell-test',
        'kwargs': {'test_paths': []},
    },
}
# Expand the numbered mochitest chunk suites (mochitest-1 .. mochitest-N)
# into TEST_SUITES, one entry per chunk.
for i in range(1, MOCHITEST_TOTAL_CHUNKS + 1):
    TEST_SUITES['mochitest-%d' % i] = {
        'aliases': ('M%d' % i, 'm%d' % i),
        'mach_command': 'mochitest',
        'kwargs': {
            'flavor': 'mochitest',
            'chunk_by_dir': MOCHITEST_CHUNK_BY_DIR,
            'total_chunks': MOCHITEST_TOTAL_CHUNKS,
            'this_chunk': i,
            'test_paths': None,
        },
    }

# Help text for the |mach test| 'what' argument, listing every supported
# suite name (built after the chunk suites above are registered).
TEST_HELP = '''
Test or tests to run. Tests can be specified by filename, directory, suite
name or suite alias.

The following test suites and aliases are supported: %s
''' % ', '.join(sorted(TEST_SUITES))
TEST_HELP = TEST_HELP.strip()
@CommandProvider
class Test(MachCommandBase):
    @Command('test', category='testing', description='Run tests.')
    @CommandArgument('what', default=None, nargs='*', help=TEST_HELP)
    def test(self, what):
        """Run the suites and/or individual tests named by |what|.

        Each argument is matched, in order, against: suite names in
        TEST_SUITES, suite aliases, and finally file/directory paths via
        the build system's TestResolver. Matched suites are dispatched
        whole; resolved tests are grouped by flavor and dispatched through
        TEST_FLAVORS.

        Returns 1 if nothing matched at all, otherwise the last non-zero
        status returned by a dispatched command (or None if everything
        succeeded).
        """
        from mozbuild.testing import TestResolver

        # Parse arguments and assemble a test "plan."
        run_suites = set()
        run_tests = []
        resolver = self._spawn(TestResolver)

        for entry in what:
            # If the path matches the name or alias of an entire suite, run
            # the entire suite.
            if entry in TEST_SUITES:
                run_suites.add(entry)
                continue
            suitefound = False
            for suite, v in TEST_SUITES.items():
                if entry in v.get('aliases', []):
                    run_suites.add(suite)
                    suitefound = True
            if suitefound:
                continue

            # Now look for file/directory matches in the TestResolver.
            relpath = self._wrap_path_argument(entry).relpath()
            tests = list(resolver.resolve_tests(paths=[relpath]))
            run_tests.extend(tests)

            if not tests:
                print('UNKNOWN TEST: %s' % entry, file=sys.stderr)

        if not run_suites and not run_tests:
            print(UNKNOWN_TEST)
            return 1

        # Run whole suites first; remember any failing status but keep going
        # so every requested suite gets a chance to run.
        status = None
        for suite_name in run_suites:
            suite = TEST_SUITES[suite_name]

            if 'mach_command' in suite:
                res = self._mach_context.commands.dispatch(
                    suite['mach_command'], self._mach_context,
                    **suite['kwargs'])
                if res:
                    status = res

            elif 'make_target' in suite:
                res = self._run_make(target=suite['make_target'],
                    pass_thru=True)
                if res:
                    status = res

        # Group individually-resolved tests by flavor, then dispatch each
        # flavor's runner once with the whole group.
        flavors = {}
        for test in run_tests:
            flavors.setdefault(test['flavor'], []).append(test)

        for flavor, tests in sorted(flavors.items()):
            if flavor not in TEST_FLAVORS:
                print(UNKNOWN_FLAVOR % flavor)
                status = 1
                continue

            m = TEST_FLAVORS[flavor]
            if 'mach_command' not in m:
                # Flavor is known but has no runner wired up yet.
                print(UNKNOWN_FLAVOR % flavor)
                status = 1
                continue

            res = self._mach_context.commands.dispatch(
                m['mach_command'], self._mach_context,
                test_objects=tests, **m['kwargs'])
            if res:
                status = res

        return status
@CommandProvider
class MachCommands(MachCommandBase):
    @Command('cppunittest', category='testing',
        description='Run cpp unit tests.')
    @CommandArgument('test_files', nargs='*', metavar='N',
        help='Test to run. Can be specified as one or more files or ' \
        'directories, or omitted. If omitted, the entire test suite is ' \
        'executed.')
    def run_cppunit_test(self, **params):
        """Run the compiled C++ unit tests.

        With no test_files arguments, runs every binary found under
        dist/cppunittests; otherwise runs only the named files/directories.
        Crash symbols from dist/crashreporter-symbols are used when present.
        Returns 0 if the tests passed, 1 otherwise.
        """
        import runcppunittests as cppunittests
        import logging

        if not params['test_files']:
            # No explicit tests: run everything in the cppunittests dir.
            testdir = os.path.join(self.distdir, 'cppunittests')
            progs = cppunittests.extract_unittests_from_args([testdir], None)
        else:
            progs = cppunittests.extract_unittests_from_args(params['test_files'], None)

        # See if we have crash symbols
        symbols_path = os.path.join(self.distdir, 'crashreporter-symbols')
        if not os.path.isdir(symbols_path):
            symbols_path = None

        tester = cppunittests.CPPUnitTests()
        try:
            result = tester.run_tests(progs, self.bindir, symbols_path)
        # 'except E as e' is valid on Python 2.6+ and 3; the original
        # 'except Exception, e' form is a Python-2-only syntax error on 3.
        except Exception as e:
            self.log(logging.ERROR, 'cppunittests',
                {'exception': str(e)},
                'Caught exception running cpp unit tests: {exception}')
            result = False

        return 0 if result else 1
@CommandProvider
class CheckSpiderMonkeyCommand(MachCommandBase):
    @Command('check-spidermonkey', category='testing', description='Run SpiderMonkey tests.')
    @CommandArgument('--valgrind', action='store_true', help='Run jit-test suite with valgrind flag')
    def run_checkspidermonkey(self, **params):
        """Run the SpiderMonkey test suites (jit-tests, jstests,
        jsapi-tests, check-style) in sequence.

        Every suite is run even if an earlier one fails. Returns 0 when all
        suites passed, otherwise the first non-zero child exit code.
        """
        import subprocess
        import sys

        bin_suffix = ''
        if sys.platform.startswith('win'):
            bin_suffix = '.exe'

        js = os.path.join(self.bindir, 'js%s' % bin_suffix)

        print('Running jit-tests')
        jittest_cmd = [os.path.join(self.topsrcdir, 'js', 'src', 'jit-test', 'jit_test.py'),
              js, '--no-slow', '--tbpl']
        if params['valgrind']:
            jittest_cmd.append('--valgrind')

        jittest_result = subprocess.call(jittest_cmd)

        print('running jstests')
        jstest_cmd = [os.path.join(self.topsrcdir, 'js', 'src', 'tests', 'jstests.py'),
              js, '--tbpl']
        jstest_result = subprocess.call(jstest_cmd)

        print('running jsapi-tests')
        jsapi_tests_cmd = [os.path.join(self.bindir, 'jsapi-tests%s' % bin_suffix)]
        jsapi_tests_result = subprocess.call(jsapi_tests_cmd)

        print('running check-style')
        check_style_cmd = [sys.executable, os.path.join(self.topsrcdir, 'config', 'check_spidermonkey_style.py')]
        check_style_result = subprocess.call(check_style_cmd, cwd=os.path.join(self.topsrcdir, 'js', 'src'))

        # subprocess.call() returns 0 on success, so chaining the results
        # with 'and' short-circuited to 0 (reported success) as soon as the
        # *first* suite passed, masking any later failure. Chain with 'or'
        # instead: 0 when everything passed, else the first failing code.
        return (jittest_result or jstest_result or jsapi_tests_result
                or check_style_result)