1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into gcl.
9 """
12 import re
13 import sys
16 _EXCLUDED_PATHS = (
17 r"^breakpad[\\\/].*",
18 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
19 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
20 r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
21 r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
22 r"^skia[\\\/].*",
23 r"^v8[\\\/].*",
24 r".*MakeFile$",
25 r".+_autogen\.h$",
26 r".+[\\\/]pnacl_shim\.c$",
27 r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
28 r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js"
31 # The TestRunner and NetscapePlugIn libraries are temporarily excluded from
32 # pan-project checks until they are transitioned to chromium coding style.
33 _TESTRUNNER_PATHS = (
34 r"^content[\\\/]shell[\\\/]renderer[\\\/]test_runner[\\\/].*",
35 r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
38 # Fragment of a regular expression that matches C++ and Objective-C++
39 # implementation files.
40 _IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
42 # Regular expression that matches code only used for test binaries
43 # (best effort).
44 _TEST_CODE_EXCLUDED_PATHS = (
45 r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
46 r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
47 r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
48 _IMPLEMENTATION_EXTENSIONS,
49 r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
50 r'.*[\\\/](test|tool(s)?)[\\\/].*',
51 # content_shell is used for running layout tests.
52 r'content[\\\/]shell[\\\/].*',
53 # At request of folks maintaining this folder.
54 r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
55 # Non-production example code.
56 r'mojo[\\\/]examples[\\\/].*',
57 # Launcher for running iOS tests on the simulator.
58 r'testing[\\\/]iossim[\\\/]iossim\.mm$',
59 )
61 _TEST_ONLY_WARNING = (
62 'You might be calling functions intended only for testing from\n'
63 'production code. It is OK to ignore this warning if you know what\n'
64 'you are doing, as the heuristics used to detect the situation are\n'
65 'not perfect. The commit queue will not block on this warning.')
68 _INCLUDE_ORDER_WARNING = (
69 'Your #include order seems to be broken. Send mail to\n'
70 'marja@chromium.org if this is not the case.')
73 _BANNED_OBJC_FUNCTIONS = (
74     (
75       'addTrackingRect:',
76       (
77        'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
78        ' prohibited. Please use CrTrackingArea instead.',
79        'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
80       ),
81       False,
82     ),
83     (
84       r'/NSTrackingArea\W',
85       (
86        'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
87        'instead.',
88        'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
89       ),
90       False,
91     ),
92     (
93       'convertPointFromBase:',
94       (
95        'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
96        'Please use |convertPoint:(point) fromView:nil| instead.',
97        'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
98       ),
99       True,
100     ),
101     (
102       'convertPointToBase:',
103       (
104        'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
105        'Please use |convertPoint:(point) toView:nil| instead.',
106        'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
107       ),
108       True,
109     ),
110     (
111       'convertRectFromBase:',
112       (
113        'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
114        'Please use |convertRect:(point) fromView:nil| instead.',
115        'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
116       ),
117       True,
118     ),
119     (
120       'convertRectToBase:',
121       (
122        'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
123        'Please use |convertRect:(point) toView:nil| instead.',
124        'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
125       ),
126       True,
127     ),
128     (
129       'convertSizeFromBase:',
130       (
131        'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
132        'Please use |convertSize:(point) fromView:nil| instead.',
133        'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
134       ),
135       True,
136     ),
137     (
138       'convertSizeToBase:',
139       (
140        'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
141        'Please use |convertSize:(point) toView:nil| instead.',
142        'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
143       ),
144       True,
145     ),
146 )
149 _BANNED_CPP_FUNCTIONS = (
150     # Make sure that gtest's FRIEND_TEST() macro is not used; the
151     # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
152     # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
153     (
154       'FRIEND_TEST(',
155       (
156        'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
157        'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
158       ),
159       False,
160       (),
161     ),
162     (
163       'ScopedAllowIO',
164       (
165        'New code should not use ScopedAllowIO. Post a task to the blocking',
166        'pool or the FILE thread instead.',
167       ),
168       True,
169       (
170         r"^base[\\\/]process[\\\/]process_metrics_linux\.cc$",
171         r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_loader\.cc$",
172         r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
173         r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
174         r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
175         r"^mojo[\\\/]system[\\\/]raw_shared_buffer_posix\.cc$",
176         r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
177         r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
178       ),
179     ),
180     (
181       'SkRefPtr',
182       (
183         'The use of SkRefPtr is prohibited. ',
184         'Please use skia::RefPtr instead.'
185       ),
186       True,
187       (),
188     ),
189     (
190       'SkAutoRef',
191       (
192         'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
193         'Please use skia::RefPtr instead.'
194       ),
195       True,
196       (),
197     ),
198     (
199       'SkAutoTUnref',
200       (
201         'The use of SkAutoTUnref is dangerous because it implicitly ',
202         'converts to a raw pointer. Please use skia::RefPtr instead.'
203       ),
204       True,
205       (),
206     ),
207     (
208       'SkAutoUnref',
209       (
210         'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
211         'because it implicitly converts to a raw pointer. ',
212         'Please use skia::RefPtr instead.'
213       ),
214       True,
215       (),
216     ),
217     (
218       r'/HANDLE_EINTR\(.*close',
219       (
220        'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
221        'descriptor will be closed, and it is incorrect to retry the close.',
222        'Either call close directly and ignore its return value, or wrap close',
223        'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
224       ),
225       True,
226       (),
227     ),
228     (
229       r'/IGNORE_EINTR\((?!.*close)',
230       (
231        'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
232        'calls, use HANDLE_EINTR. See http://crbug.com/269623',
233       ),
234       True,
235       (
236         # Files that #define IGNORE_EINTR.
237         r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
238         r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
239       ),
240     ),
241     (
242       r'/v8::Extension\(',
243       (
244         'Do not introduce new v8::Extensions into the code base, use',
245         'gin::Wrappable instead. See http://crbug.com/334679',
246       ),
247       True,
248       (
249         r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
250       ),
251     ),
252 )
255 _VALID_OS_MACROS = (
256 # Please keep sorted.
257 'OS_ANDROID',
258 'OS_ANDROID_HOST',
259 'OS_BSD',
260 'OS_CAT', # For testing.
261 'OS_CHROMEOS',
262 'OS_FREEBSD',
263 'OS_IOS',
264 'OS_LINUX',
265 'OS_MACOSX',
266 'OS_NACL',
267 'OS_OPENBSD',
268 'OS_POSIX',
269 'OS_QNX',
270 'OS_SOLARIS',
271 'OS_WIN',
272 )
275 def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
276 """Attempts to prevent use of functions intended only for testing in
277 non-testing code. For now this is just a best-effort implementation
278 that ignores header files and may have some false positives. A
279 better implementation would probably need a proper C++ parser.
280 """
281 # We only scan .cc files and the like, as the declaration of
282 # for-testing functions in header files are hard to distinguish from
283 # calls to such functions without a proper C++ parser.
284 file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
286 base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
287 inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
288 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
289 exclusion_pattern = input_api.re.compile(
290 r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
291 base_function_pattern, base_function_pattern))
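# Illustrative examples (hypothetical names) of how these patterns interact:
#   cache->ClearDataForTesting();          -> flagged by inclusion_pattern
#   void Cache::ClearDataForTesting() {    -> skipped by exclusion_pattern
#   // calls ClearDataForTesting() later   -> skipped by comment_pattern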
293 def FilterFile(affected_file):
294 black_list = (_EXCLUDED_PATHS +
295 _TEST_CODE_EXCLUDED_PATHS +
296 input_api.DEFAULT_BLACK_LIST)
297 return input_api.FilterSourceFile(
298 affected_file,
299 white_list=(file_inclusion_pattern, ),
300 black_list=black_list)
302 problems = []
303 for f in input_api.AffectedSourceFiles(FilterFile):
304 local_path = f.LocalPath()
305 for line_number, line in f.ChangedContents():
306 if (inclusion_pattern.search(line) and
307 not comment_pattern.search(line) and
308 not exclusion_pattern.search(line)):
309 problems.append(
310 '%s:%d\n %s' % (local_path, line_number, line.strip()))
312 if problems:
313 return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
314 else:
315 return []
318 def _CheckNoIOStreamInHeaders(input_api, output_api):
319 """Checks to make sure no .h files include <iostream>."""
320 files = []
321 pattern = input_api.re.compile(r'^#include\s*<iostream>',
322 input_api.re.MULTILINE)
323 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
324 if not f.LocalPath().endswith('.h'):
325 continue
326 contents = input_api.ReadFile(f)
327 if pattern.search(contents):
328 files.append(f)
330 if len(files):
331 return [ output_api.PresubmitError(
332 'Do not #include <iostream> in header files, since it inserts static '
333 'initialization into every file including the header. Instead, '
334 '#include <ostream>. See http://crbug.com/94794',
335 files) ]
336 return []
339 def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
340 """Checks to make sure no source files use UNIT_TEST"""
341 problems = []
342 for f in input_api.AffectedFiles():
343 if (not f.LocalPath().endswith(('.cc', '.mm'))):
344 continue
346 for line_num, line in f.ChangedContents():
347 if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
348 problems.append(' %s:%d' % (f.LocalPath(), line_num))
350 if not problems:
351 return []
352 return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
353 '\n'.join(problems))]
356 def _CheckNoNewWStrings(input_api, output_api):
357 """Checks to make sure we don't introduce use of wstrings."""
358 problems = []
359 for f in input_api.AffectedFiles():
360 if (not f.LocalPath().endswith(('.cc', '.h')) or
361 f.LocalPath().endswith(('test.cc', '_win.cc', '_win.h'))):
362 continue
364 allowWString = False
365 for line_num, line in f.ChangedContents():
366 if 'presubmit: allow wstring' in line:
367 allowWString = True
368 elif not allowWString and 'wstring' in line:
369 problems.append(' %s:%d' % (f.LocalPath(), line_num))
370 allowWString = False
371 else:
372 allowWString = False
374 if not problems:
375 return []
376 return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
377 ' If you are calling a cross-platform API that accepts a wstring, '
378 'fix the API.\n' +
379 '\n'.join(problems))]
382 def _CheckNoDEPSGIT(input_api, output_api):
383 """Make sure .DEPS.git is never modified manually."""
384 if any(f.LocalPath().endswith('.DEPS.git') for f in
385 input_api.AffectedFiles()):
386 return [output_api.PresubmitError(
387 'Never commit changes to .DEPS.git. This file is maintained by an\n'
388 'automated system based on what\'s in DEPS and your changes will be\n'
389 'overwritten.\n'
390 'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
391 'for more information')]
392 return []
395 def _CheckNoBannedFunctions(input_api, output_api):
396 """Make sure that banned functions are not used."""
397 warnings = []
398 errors = []
400 file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
401 for f in input_api.AffectedFiles(file_filter=file_filter):
402 for line_num, line in f.ChangedContents():
403 for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
404 matched = False
405 if func_name[0:1] == '/':
406 regex = func_name[1:]
407 if input_api.re.search(regex, line):
408 matched = True
409 elif func_name in line:
410 matched = True
411 if matched:
412 problems = warnings;
413 if error:
414 problems = errors;
415 problems.append(' %s:%d:' % (f.LocalPath(), line_num))
416 for message_line in message:
417 problems.append(' %s' % message_line)
419 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
420 for f in input_api.AffectedFiles(file_filter=file_filter):
421 for line_num, line in f.ChangedContents():
422 for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
423 def IsBlacklisted(affected_file, blacklist):
424 local_path = affected_file.LocalPath()
425 for item in blacklist:
426 if input_api.re.match(item, local_path):
427 return True
428 return False
429 if IsBlacklisted(f, excluded_paths):
430 continue
431 matched = False
432 if func_name[0:1] == '/':
433 regex = func_name[1:]
434 if input_api.re.search(regex, line):
435 matched = True
436 elif func_name in line:
437 matched = True
438 if matched:
439 problems = warnings;
440 if error:
441 problems = errors;
442 problems.append(' %s:%d:' % (f.LocalPath(), line_num))
443 for message_line in message:
444 problems.append(' %s' % message_line)
446 result = []
447 if (warnings):
448 result.append(output_api.PresubmitPromptWarning(
449 'Banned functions were used.\n' + '\n'.join(warnings)))
450 if (errors):
451 result.append(output_api.PresubmitError(
452 'Banned functions were used.\n' + '\n'.join(errors)))
453 return result
456 def _CheckNoPragmaOnce(input_api, output_api):
457 """Make sure that banned functions are not used."""
458 files = []
459 pattern = input_api.re.compile(r'^#pragma\s+once',
460 input_api.re.MULTILINE)
461 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
462 if not f.LocalPath().endswith('.h'):
463 continue
464 contents = input_api.ReadFile(f)
465 if pattern.search(contents):
466 files.append(f)
468 if files:
469 return [output_api.PresubmitError(
470 'Do not use #pragma once in header files.\n'
471 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
472 files)]
473 return []
476 def _CheckNoTrinaryTrueFalse(input_api, output_api):
477 """Checks to make sure we don't introduce use of foo ? true : false."""
478 problems = []
479 pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
480 for f in input_api.AffectedFiles():
481 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
482 continue
484 for line_num, line in f.ChangedContents():
485 if pattern.match(line):
486 problems.append(' %s:%d' % (f.LocalPath(), line_num))
488 if not problems:
489 return []
490 return [output_api.PresubmitPromptWarning(
491 'Please consider avoiding the "? true : false" pattern if possible.\n' +
492 '\n'.join(problems))]
495 def _CheckUnwantedDependencies(input_api, output_api):
496 """Runs checkdeps on #include statements added in this
497 change. Breaking - rules is an error, breaking ! rules is a
498 warning.
499 """
500 # We need to wait until we have an input_api object and use this
501 # roundabout construct to import checkdeps because this file is
502 # eval-ed and thus doesn't have __file__.
503 original_sys_path = sys.path
504 try:
505 sys.path = sys.path + [input_api.os_path.join(
506 input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
507 import checkdeps
508 from cpp_checker import CppChecker
509 from rules import Rule
510 finally:
511 # Restore sys.path to what it was before.
512 sys.path = original_sys_path
514 added_includes = []
515 for f in input_api.AffectedFiles():
516 if not CppChecker.IsCppFile(f.LocalPath()):
517 continue
519 changed_lines = [line for line_num, line in f.ChangedContents()]
520 added_includes.append([f.LocalPath(), changed_lines])
522 deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
524 error_descriptions = []
525 warning_descriptions = []
526 for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
527 added_includes):
528 description_with_path = '%s\n %s' % (path, rule_description)
529 if rule_type == Rule.DISALLOW:
530 error_descriptions.append(description_with_path)
531 else:
532 warning_descriptions.append(description_with_path)
534 results = []
535 if error_descriptions:
536 results.append(output_api.PresubmitError(
537 'You added one or more #includes that violate checkdeps rules.',
538 error_descriptions))
539 if warning_descriptions:
540 results.append(output_api.PresubmitPromptOrNotify(
541 'You added one or more #includes of files that are temporarily\n'
542 'allowed but being removed. Can you avoid introducing the\n'
543 '#include? See relevant DEPS file(s) for details and contacts.',
544 warning_descriptions))
545 return results
548 def _CheckFilePermissions(input_api, output_api):
549 """Check that all files have their permissions properly set."""
550 if input_api.platform == 'win32':
551 return []
552 args = [sys.executable, 'tools/checkperms/checkperms.py', '--root',
553 input_api.change.RepositoryRoot()]
554 for f in input_api.AffectedFiles():
555 args += ['--file', f.LocalPath()]
556 checkperms = input_api.subprocess.Popen(args,
557 stdout=input_api.subprocess.PIPE)
558 errors = checkperms.communicate()[0].strip()
559 if errors:
560 return [output_api.PresubmitError('checkperms.py failed.',
561 errors.splitlines())]
562 return []
565 def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
566 """Makes sure we don't include ui/aura/window_property.h
567 in header files.
568 """
569 pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
570 errors = []
571 for f in input_api.AffectedFiles():
572 if not f.LocalPath().endswith('.h'):
573 continue
574 for line_num, line in f.ChangedContents():
575 if pattern.match(line):
576 errors.append(' %s:%d' % (f.LocalPath(), line_num))
578 results = []
579 if errors:
580 results.append(output_api.PresubmitError(
581 'Header files should not include ui/aura/window_property.h', errors))
582 return results
585 def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
586 """Checks that the lines in scope occur in the right order.
588 1. C system files in alphabetical order
589 2. C++ system files in alphabetical order
590 3. Project's .h files
591 """
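# A scope in the expected order might look like (paths illustrative only):
#   #include <stddef.h>        <- C system
#   #include <string>          <- C++ system
#   #include "base/macros.h"   <- project header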
593 c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
594 cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
595 custom_include_pattern = input_api.re.compile(r'\s*#include ".*')
597 C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)
599 state = C_SYSTEM_INCLUDES
601 previous_line = ''
602 previous_line_num = 0
603 problem_linenums = []
604 for line_num, line in scope:
605 if c_system_include_pattern.match(line):
606 if state != C_SYSTEM_INCLUDES:
607 problem_linenums.append((line_num, previous_line_num))
608 elif previous_line and previous_line > line:
609 problem_linenums.append((line_num, previous_line_num))
610 elif cpp_system_include_pattern.match(line):
611 if state == C_SYSTEM_INCLUDES:
612 state = CPP_SYSTEM_INCLUDES
613 elif state == CUSTOM_INCLUDES:
614 problem_linenums.append((line_num, previous_line_num))
615 elif previous_line and previous_line > line:
616 problem_linenums.append((line_num, previous_line_num))
617 elif custom_include_pattern.match(line):
618 if state != CUSTOM_INCLUDES:
619 state = CUSTOM_INCLUDES
620 elif previous_line and previous_line > line:
621 problem_linenums.append((line_num, previous_line_num))
622 else:
623 problem_linenums.append(line_num)
624 previous_line = line
625 previous_line_num = line_num
627 warnings = []
628 for (line_num, previous_line_num) in problem_linenums:
629 if line_num in changed_linenums or previous_line_num in changed_linenums:
630 warnings.append(' %s:%d' % (file_path, line_num))
631 return warnings
634 def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
635 """Checks the #include order for the given file f."""
637 system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
638 # Exclude the following includes from the check:
639 # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
640 # specific order.
641 # 2) <atlbase.h>, "build/build_config.h"
642 excluded_include_pattern = input_api.re.compile(
643 r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
644 custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
645 # Match the final or penultimate token if it is xxxtest so we can ignore it
646 # when considering the special first include.
647 test_file_tag_pattern = input_api.re.compile(
648 r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
649 if_pattern = input_api.re.compile(
650 r'\s*#\s*(if|elif|else|endif|define|undef).*')
651 # Some files need specialized order of includes; exclude such files from this
652 # check.
653 uncheckable_includes_pattern = input_api.re.compile(
654 r'\s*#include '
655 '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')
657 contents = f.NewContents()
658 warnings = []
659 line_num = 0
661 # Handle the special first include. If the first include file is
662 # some/path/file.h, the corresponding including file can be some/path/file.cc,
663 # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
664 # etc. It's also possible that no special first include exists.
665 # If the included file is some/path/file_platform.h the including file could
666 # also be some/path/file_xxxtest_platform.h.
667 including_file_base_name = test_file_tag_pattern.sub(
668 '', input_api.os_path.basename(f.LocalPath()))
670 for line in contents:
671 line_num += 1
672 if system_include_pattern.match(line):
673 # No special first include -> process the line again along with normal
674 # includes.
675 line_num -= 1
676 break
677 match = custom_include_pattern.match(line)
678 if match:
679 match_dict = match.groupdict()
680 header_basename = test_file_tag_pattern.sub(
681 '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')
683 if header_basename not in including_file_base_name:
684 # No special first include -> process the line again along with normal
685 # includes.
686 line_num -= 1
687 break
689 # Split into scopes: Each region between #if and #endif is its own scope.
690 scopes = []
691 current_scope = []
692 for line in contents[line_num:]:
693 line_num += 1
694 if uncheckable_includes_pattern.match(line):
695 continue
696 if if_pattern.match(line):
697 scopes.append(current_scope)
698 current_scope = []
699 elif ((system_include_pattern.match(line) or
700 custom_include_pattern.match(line)) and
701 not excluded_include_pattern.match(line)):
702 current_scope.append((line_num, line))
703 scopes.append(current_scope)
705 for scope in scopes:
706 warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
707 changed_linenums))
708 return warnings
711 def _CheckIncludeOrder(input_api, output_api):
712 """Checks that the #include order is correct.
714 1. The corresponding header for source files.
715 2. C system files in alphabetical order
716 3. C++ system files in alphabetical order
717 4. Project's .h files in alphabetical order
719 Each region separated by #if, #elif, #else, #endif, #define and #undef follows
720 these rules separately.
721 """
722 def FileFilterIncludeOrder(affected_file):
723 black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
724 return input_api.FilterSourceFile(affected_file, black_list=black_list)
726 warnings = []
727 for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
728 if f.LocalPath().endswith(('.cc', '.h')):
729 changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
730 warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))
732 results = []
733 if warnings:
734 results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
735 warnings))
736 return results
739 def _CheckForVersionControlConflictsInFile(input_api, f):
740 pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
741 errors = []
742 for line_num, line in f.ChangedContents():
743 if pattern.match(line):
744 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
745 return errors
748 def _CheckForVersionControlConflicts(input_api, output_api):
749 """Usually this is not intentional and will cause a compile failure."""
750 errors = []
751 for f in input_api.AffectedFiles():
752 errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))
754 results = []
755 if errors:
756 results.append(output_api.PresubmitError(
757 'Version control conflict markers found, please resolve.', errors))
758 return results
761 def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
762 def FilterFile(affected_file):
763 """Filter function for use with input_api.AffectedSourceFiles,
764 below. This filters out everything except non-test files from
765 top-level directories that generally speaking should not hard-code
766 service URLs (e.g. src/android_webview/, src/content/ and others).
767 """
768 return input_api.FilterSourceFile(
769 affected_file,
770 white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
771 black_list=(_EXCLUDED_PATHS +
772 _TEST_CODE_EXCLUDED_PATHS +
773 input_api.DEFAULT_BLACK_LIST))
775 base_pattern = '"[^"]*google\.com[^"]*"'
776 comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
777 pattern = input_api.re.compile(base_pattern)
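# For example (illustrative), a new line under src/net/ such as
#   GURL url("https://www.google.com/foo");
# is reported, while a line that mentions google.com only inside a // comment
# is not.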
778 problems = [] # items are (filename, line_number, line)
779 for f in input_api.AffectedSourceFiles(FilterFile):
780 for line_num, line in f.ChangedContents():
781 if not comment_pattern.search(line) and pattern.search(line):
782 problems.append((f.LocalPath(), line_num, line))
784 if problems:
785 return [output_api.PresubmitPromptOrNotify(
786 'Most layers below src/chrome/ should not hardcode service URLs.\n'
787 'Are you sure this is correct?',
788 [' %s:%d: %s' % (
789 problem[0], problem[1], problem[2]) for problem in problems])]
790 else:
791 return []
794 def _CheckNoAbbreviationInPngFileName(input_api, output_api):
795 """Makes sure there are no abbreviations in the name of PNG files.
797 pattern = input_api.re.compile(r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$')
798 errors = []
799 for f in input_api.AffectedFiles(include_deletes=False):
800 if pattern.match(f.LocalPath()):
801 errors.append(' %s' % f.LocalPath())
803 results = []
804 if errors:
805 results.append(output_api.PresubmitError(
806 'The name of PNG files should not have abbreviations. \n'
807 'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
808 'Contact oshima@chromium.org if you have questions.', errors))
809 return results
812 def _FilesToCheckForIncomingDeps(re, changed_lines):
813 """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
814 a set of DEPS entries that we should look up.
816 For a directory (rather than a specific filename) we fake a path to
817 a specific filename by adding /DEPS. This is chosen as a file that
818 will seldom or never be subject to per-file include_rules.
819 """
820 # We ignore deps entries on auto-generated directories.
821 AUTO_GENERATED_DIRS = ['grit', 'jni']
823 # This pattern grabs the path without basename in the first
824 # parentheses, and the basename (if present) in the second. It
825 # relies on the simple heuristic that if there is a basename it will
826 # be a header file ending in ".h".
827 pattern = re.compile(
828 r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
829 results = set()
830 for changed_line in changed_lines:
831 m = pattern.match(changed_line)
832 if m:
833 path = m.group(1)
834 if path.split('/')[0] not in AUTO_GENERATED_DIRS:
835 if m.group(2):
836 results.add('%s%s' % (path, m.group(2)))
837 else:
838 results.add('%s/DEPS' % path)
839 return results
842 def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
843 """When a dependency prefixed with + is added to a DEPS file, we
844 want to make sure that the change is reviewed by an OWNER of the
845 target file or directory, to avoid layering violations from being
846 introduced. This check verifies that this happens.
847 """
848 changed_lines = set()
849 for f in input_api.AffectedFiles():
850 filename = input_api.os_path.basename(f.LocalPath())
851 if filename == 'DEPS':
852 changed_lines |= set(line.strip()
853 for line_num, line
854 in f.ChangedContents())
855 if not changed_lines:
856 return []
858 virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
859 changed_lines)
860 if not virtual_depended_on_files:
861 return []
863 if input_api.is_committing:
864 if input_api.tbr:
865 return [output_api.PresubmitNotifyResult(
866 '--tbr was specified, skipping OWNERS check for DEPS additions')]
867 if not input_api.change.issue:
868 return [output_api.PresubmitError(
869 "DEPS approval by OWNERS check failed: this change has "
870 "no Rietveld issue number, so we can't check it for approvals.")]
871 output = output_api.PresubmitError
872 else:
873 output = output_api.PresubmitNotifyResult
875 owners_db = input_api.owners_db
876 owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
877 input_api,
878 owners_db.email_regexp,
879 approval_needed=input_api.is_committing)
881 owner_email = owner_email or input_api.change.author_email
883 reviewers_plus_owner = set(reviewers)
884 if owner_email:
885 reviewers_plus_owner.add(owner_email)
886 missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
887 reviewers_plus_owner)
889 # We strip the /DEPS part that was added by
890 # _FilesToCheckForIncomingDeps to fake a path to a file in a
891 # directory.
892 def StripDeps(path):
893 start_deps = path.rfind('/DEPS')
894 if start_deps != -1:
895 return path[:start_deps]
896 else:
897 return path
898 unapproved_dependencies = ["'+%s'," % StripDeps(path)
899 for path in missing_files]
901 if unapproved_dependencies:
902 output_list = [
903 output('Missing LGTM from OWNERS of dependencies added to DEPS:\n %s' %
904 '\n '.join(sorted(unapproved_dependencies)))]
905 if not input_api.is_committing:
906 suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
907 output_list.append(output(
908 'Suggested missing target path OWNERS:\n %s' %
909 '\n '.join(suggested_owners or [])))
910 return output_list
912 return []
915 def _CheckSpamLogging(input_api, output_api):
916 file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
917 black_list = (_EXCLUDED_PATHS +
918 _TEST_CODE_EXCLUDED_PATHS +
919 input_api.DEFAULT_BLACK_LIST +
920 (r"^base[\\\/]logging\.h$",
921 r"^base[\\\/]logging\.cc$",
922 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
923 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
924 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
925 r"startup_browser_creator\.cc$",
926 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
927 r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
928 r"diagnostics_writer\.cc$",
929 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
930 r"^chromecast[\\\/]",
931 r"^cloud_print[\\\/]",
932 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
933 r"gl_helper_benchmark\.cc$",
934 r"^courgette[\\\/]courgette_tool\.cc$",
935 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
936 r"^native_client_sdk[\\\/]",
937 r"^remoting[\\\/]base[\\\/]logging\.h$",
938 r"^remoting[\\\/]host[\\\/].*",
939 r"^sandbox[\\\/]linux[\\\/].*",
940 r"^tools[\\\/]",
941 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",
942 r"^webkit[\\\/]browser[\\\/]fileapi[\\\/]" +
943 r"dump_file_system.cc$",))
944 source_file_filter = lambda x: input_api.FilterSourceFile(
945 x, white_list=(file_inclusion_pattern,), black_list=black_list)
947 log_info = []
948 printf = []
950 for f in input_api.AffectedSourceFiles(source_file_filter):
951 contents = input_api.ReadFile(f, 'rb')
952 if re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
953 log_info.append(f.LocalPath())
954 elif re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
955 log_info.append(f.LocalPath())
957 if re.search(r"\bprintf\(", contents):
958 printf.append(f.LocalPath())
959 elif re.search(r"\bfprintf\((stdout|stderr)", contents):
960 printf.append(f.LocalPath())
962 if log_info:
963 return [output_api.PresubmitError(
964 'These files spam the console log with LOG(INFO):',
965 items=log_info)]
966 if printf:
967 return [output_api.PresubmitError(
968 'These files spam the console log with printf/fprintf:',
969 items=printf)]
970 return []
973 def _CheckForAnonymousVariables(input_api, output_api):
974 """These types are all expected to hold locks while in scope and
975 so should never be anonymous (which causes them to be immediately
976 destroyed)."""
977 they_who_must_be_named = [
978 'base::AutoLock',
979 'base::AutoReset',
980 'base::AutoUnlock',
981 'SkAutoAlphaRestore',
982 'SkAutoBitmapShaderInstall',
983 'SkAutoBlitterChoose',
984 'SkAutoBounderCommit',
985 'SkAutoCallProc',
986 'SkAutoCanvasRestore',
987 'SkAutoCommentBlock',
988 'SkAutoDescriptor',
989 'SkAutoDisableDirectionCheck',
990 'SkAutoDisableOvalCheck',
991 'SkAutoFree',
992 'SkAutoGlyphCache',
993 'SkAutoHDC',
994 'SkAutoLockColors',
995 'SkAutoLockPixels',
996 'SkAutoMalloc',
997 'SkAutoMaskFreeImage',
998 'SkAutoMutexAcquire',
999 'SkAutoPathBoundsUpdate',
1000 'SkAutoPDFRelease',
1001 'SkAutoRasterClipValidate',
1002 'SkAutoRef',
1003 'SkAutoTime',
1004 'SkAutoTrace',
1005 'SkAutoUnref',
1006 ]
1007 anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
1008 # bad: base::AutoLock(lock.get());
1009 # not bad: base::AutoLock lock(lock.get());
1010 bad_pattern = input_api.re.compile(anonymous)
1011 # good: new base::AutoLock(lock.get())
1012 good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
1013 errors = []
1015 for f in input_api.AffectedFiles():
1016 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
1017 continue
1018 for linenum, line in f.ChangedContents():
1019 if bad_pattern.search(line) and not good_pattern.search(line):
1020 errors.append('%s:%d' % (f.LocalPath(), linenum))
1022 if errors:
1023 return [output_api.PresubmitError(
1024 'These lines create anonymous variables that need to be named:',
1025 items=errors)]
1026 return []
1029 def _CheckCygwinShell(input_api, output_api):
1030 source_file_filter = lambda x: input_api.FilterSourceFile(
1031 x, white_list=(r'.+\.(gyp|gypi)$',))
1032 cygwin_shell = []
1034 for f in input_api.AffectedSourceFiles(source_file_filter):
1035 for linenum, line in f.ChangedContents():
1036 if 'msvs_cygwin_shell' in line:
1037 cygwin_shell.append(f.LocalPath())
1038 break
1040 if cygwin_shell:
1041 return [output_api.PresubmitError(
1042 'These files should not use msvs_cygwin_shell (the default is 0):',
1043 items=cygwin_shell)]
1044 return []
1047 def _CheckUserActionUpdate(input_api, output_api):
1048 """Checks if any new user action has been added."""
1049 if any('actions.xml' == input_api.os_path.basename(f) for f in
1050 input_api.LocalPaths()):
1051 # If actions.xml is already included in the changelist, the PRESUBMIT
1052 # for actions.xml will do a more complete presubmit check.
1053 return []
1055 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
1056 action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
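# Matches e.g. RecordAction(UserMetricsAction("ShowAppMenu")) and captures
# "ShowAppMenu", which must then appear as name="ShowAppMenu" in actions.xml.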
1057 current_actions = None
1058 for f in input_api.AffectedFiles(file_filter=file_filter):
1059 for line_num, line in f.ChangedContents():
1060 match = input_api.re.search(action_re, line)
1061 if match:
1062 # Loads contents in tools/metrics/actions/actions.xml to memory. It's
1063 # loaded only once.
1064 if not current_actions:
1065 with open('tools/metrics/actions/actions.xml') as actions_f:
1066 current_actions = actions_f.read()
1067 # Search for the matched user action name in |current_actions|.
1068 for action_name in match.groups():
1069 action = 'name="{0}"'.format(action_name)
1070 if action not in current_actions:
1071 return [output_api.PresubmitPromptWarning(
1072 'File %s line %d: %s is missing in '
1073 'tools/metrics/actions/actions.xml. Please run '
1074 'tools/metrics/actions/extract_actions.py to update.'
1075 % (f.LocalPath(), line_num, action_name))]
1076 return []
1079 def _GetJSONParseError(input_api, filename, eat_comments=True):
1080 try:
1081 contents = input_api.ReadFile(filename)
1082 if eat_comments:
1083 json_comment_eater = input_api.os_path.join(
1084 input_api.PresubmitLocalPath(),
1085 'tools', 'json_comment_eater', 'json_comment_eater.py')
1086 process = input_api.subprocess.Popen(
1087 [input_api.python_executable, json_comment_eater],
1088 stdin=input_api.subprocess.PIPE,
1089 stdout=input_api.subprocess.PIPE,
1090 universal_newlines=True)
1091 (contents, _) = process.communicate(input=contents)
1093 input_api.json.loads(contents)
1094 except ValueError as e:
1095 return e
1096 return None
1099 def _GetIDLParseError(input_api, filename):
1100 try:
1101 contents = input_api.ReadFile(filename)
1102 idl_schema = input_api.os_path.join(
1103 input_api.PresubmitLocalPath(),
1104 'tools', 'json_schema_compiler', 'idl_schema.py')
1105 process = input_api.subprocess.Popen(
1106 [input_api.python_executable, idl_schema],
1107 stdin=input_api.subprocess.PIPE,
1108 stdout=input_api.subprocess.PIPE,
1109 stderr=input_api.subprocess.PIPE,
1110 universal_newlines=True)
1111 (_, error) = process.communicate(input=contents)
1112 return error or None
1113 except ValueError as e:
1114 return e
1117 def _CheckParseErrors(input_api, output_api):
1118 """Check that IDL and JSON files do not contain syntax errors."""
1119 actions = {
1120 '.idl': _GetIDLParseError,
1121 '.json': _GetJSONParseError,
1122 }
1123 # These paths contain test data and other known invalid JSON files.
1124 excluded_patterns = [
1125 r'test[\\\/]data[\\\/]',
1126 r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
1127 ]
1128 # Most JSON files are preprocessed and support comments, but these do not.
1129 json_no_comments_patterns = [
1130 r'^testing[\\\/]',
1131 ]
1132 # Only run IDL checker on files in these directories.
1133 idl_included_patterns = [
1134 r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
1135 r'^extensions[\\\/]common[\\\/]api[\\\/]',
1136 ]
1138 def get_action(affected_file):
1139 filename = affected_file.LocalPath()
1140 return actions.get(input_api.os_path.splitext(filename)[1])
1142 def MatchesFile(patterns, path):
1143 for pattern in patterns:
1144 if input_api.re.search(pattern, path):
1145 return True
1146 return False
1148 def FilterFile(affected_file):
1149 action = get_action(affected_file)
1150 if not action:
1151 return False
1152 path = affected_file.LocalPath()
1154 if MatchesFile(excluded_patterns, path):
1155 return False
1157 if (action == _GetIDLParseError and
1158 not MatchesFile(idl_included_patterns, path)):
1159 return False
1160 return True
1162 results = []
1163 for affected_file in input_api.AffectedFiles(
1164 file_filter=FilterFile, include_deletes=False):
1165 action = get_action(affected_file)
1166 kwargs = {}
1167 if (action == _GetJSONParseError and
1168 MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
1169 kwargs['eat_comments'] = False
1170 parse_error = action(input_api,
1171 affected_file.AbsoluteLocalPath(),
1172 **kwargs)
1173 if parse_error:
1174 results.append(output_api.PresubmitError('%s could not be parsed: %s' %
1175 (affected_file.LocalPath(), parse_error)))
1176 return results
1179 def _CheckJavaStyle(input_api, output_api):
1180 """Runs checkstyle on changed java files and returns errors if any exist."""
1181 original_sys_path = sys.path
1182 try:
1183 sys.path = sys.path + [input_api.os_path.join(
1184 input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
1185 import checkstyle
1186 finally:
1187 # Restore sys.path to what it was before.
1188 sys.path = original_sys_path
1190 return checkstyle.RunCheckstyle(
1191 input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml')
1194 _DEPRECATED_CSS = [
1195 # Values
1196 ( "-webkit-box", "flex" ),
1197 ( "-webkit-inline-box", "inline-flex" ),
1198 ( "-webkit-flex", "flex" ),
1199 ( "-webkit-inline-flex", "inline-flex" ),
1200 ( "-webkit-min-content", "min-content" ),
1201 ( "-webkit-max-content", "max-content" ),
1203 # Properties
1204 ( "-webkit-background-clip", "background-clip" ),
1205 ( "-webkit-background-origin", "background-origin" ),
1206 ( "-webkit-background-size", "background-size" ),
1207 ( "-webkit-box-shadow", "box-shadow" ),
1209 # Functions
1210 ( "-webkit-gradient", "gradient" ),
1211 ( "-webkit-repeating-gradient", "repeating-gradient" ),
1212 ( "-webkit-linear-gradient", "linear-gradient" ),
1213 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
1214 ( "-webkit-radial-gradient", "radial-gradient" ),
1215 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
1218 def _CheckNoDeprecatedCSS(input_api, output_api):
1219 """ Make sure that we don't use deprecated CSS
1220 properties, functions or values. Our external
1221 documentation is ignored by the hooks as it
1222 needs to be consumed by WebKit. """
1223 results = []
1224 file_inclusion_pattern = (r".+\.css$")
1225 black_list = (_EXCLUDED_PATHS +
1226 _TEST_CODE_EXCLUDED_PATHS +
1227 input_api.DEFAULT_BLACK_LIST +
1228 (r"^chrome/common/extensions/docs",
1229 r"^chrome/docs",
1230 r"^native_client_sdk"))
1231 file_filter = lambda f: input_api.FilterSourceFile(
1232 f, white_list=file_inclusion_pattern, black_list=black_list)
1233 for fpath in input_api.AffectedFiles(file_filter=file_filter):
1234 for line_num, line in fpath.ChangedContents():
1235 for (deprecated_value, value) in _DEPRECATED_CSS:
1236 if input_api.re.search(deprecated_value, line):
1237 results.append(output_api.PresubmitError(
1238 "%s:%d: Use of deprecated CSS %s, use %s instead" %
1239 (fpath.LocalPath(), line_num, deprecated_value, value)))
1240 return results
1242 def _CommonChecks(input_api, output_api):
1243 """Checks common to both upload and commit."""
1244 results = []
1245 results.extend(input_api.canned_checks.PanProjectChecks(
1246 input_api, output_api,
1247 excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
1248 results.extend(_CheckAuthorizedAuthor(input_api, output_api))
1249 results.extend(
1250 _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
1251 results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
1252 results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
1253 results.extend(_CheckNoNewWStrings(input_api, output_api))
1254 results.extend(_CheckNoDEPSGIT(input_api, output_api))
1255 results.extend(_CheckNoBannedFunctions(input_api, output_api))
1256 results.extend(_CheckNoPragmaOnce(input_api, output_api))
1257 results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
1258 results.extend(_CheckUnwantedDependencies(input_api, output_api))
1259 results.extend(_CheckFilePermissions(input_api, output_api))
1260 results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
1261 results.extend(_CheckIncludeOrder(input_api, output_api))
1262 results.extend(_CheckForVersionControlConflicts(input_api, output_api))
1263 results.extend(_CheckPatchFiles(input_api, output_api))
1264 results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
1265 results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
1266 results.extend(_CheckForInvalidOSMacros(input_api, output_api))
1267 results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
1268 results.extend(
1269 input_api.canned_checks.CheckChangeHasNoTabs(
1270 input_api,
1271 output_api,
1272 source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
1273 results.extend(_CheckSpamLogging(input_api, output_api))
1274 results.extend(_CheckForAnonymousVariables(input_api, output_api))
1275 results.extend(_CheckCygwinShell(input_api, output_api))
1276 results.extend(_CheckUserActionUpdate(input_api, output_api))
1277 results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
1278 results.extend(_CheckParseErrors(input_api, output_api))
1280 if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
1281 results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
1282 input_api, output_api,
1283 input_api.PresubmitLocalPath(),
1284 whitelist=[r'^PRESUBMIT_test\.py$']))
1285 return results
1288 def _CheckAuthorizedAuthor(input_api, output_api):
1289 """For non-googler/chromites committers, verify the author's email address is
1290 in AUTHORS.
1291 """
1292 # TODO(maruel): Add it to input_api?
1293 import fnmatch
1295 author = input_api.change.author_email
1296 if not author:
1297 input_api.logging.info('No author, skipping AUTHOR check')
1298 return []
1299 authors_path = input_api.os_path.join(
1300 input_api.PresubmitLocalPath(), 'AUTHORS')
1301 valid_authors = (
1302 input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
1303 for line in open(authors_path))
1304 valid_authors = [item.group(1).lower() for item in valid_authors if item]
1305 if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
1306 input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
1307 return [output_api.PresubmitPromptWarning(
1308 ('%s is not in AUTHORS file. If you are a new contributor, please visit'
1309 '\n'
1310 'http://www.chromium.org/developers/contributing-code and read the '
1311 '"Legal" section\n'
1312 'If you are a chromite, verify the contributor signed the CLA.') %
1313 author)]
1314 return []
1317 def _CheckPatchFiles(input_api, output_api):
1318 problems = [f.LocalPath() for f in input_api.AffectedFiles()
1319 if f.LocalPath().endswith(('.orig', '.rej'))]
1320 if problems:
1321 return [output_api.PresubmitError(
1322 "Don't commit .rej and .orig files.", problems)]
1323 else:
1324 return []
1327 def _DidYouMeanOSMacro(bad_macro):
1328 try:
1329 return {'A': 'OS_ANDROID',
1330 'B': 'OS_BSD',
1331 'C': 'OS_CHROMEOS',
1332 'F': 'OS_FREEBSD',
1333 'L': 'OS_LINUX',
1334 'M': 'OS_MACOSX',
1335 'N': 'OS_NACL',
1336 'O': 'OS_OPENBSD',
1337 'P': 'OS_POSIX',
1338 'S': 'OS_SOLARIS',
1339 'W': 'OS_WIN'}[bad_macro[3].upper()]
1340 except KeyError:
1341 return ''
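# For example, _DidYouMeanOSMacro('OS_WINDOWS') returns 'OS_WIN' and
# _DidYouMeanOSMacro('OS_MAC') returns 'OS_MACOSX'; unrecognized letters
# after 'OS_' yield the empty string.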
1344 def _CheckForInvalidOSMacrosInFile(input_api, f):
1345 """Check for sensible looking, totally invalid OS macros."""
1346 preprocessor_statement = input_api.re.compile(r'^\s*#')
1347 os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
1348 results = []
1349 for lnum, line in f.ChangedContents():
1350 if preprocessor_statement.search(line):
1351 for match in os_macro.finditer(line):
1352 if not match.group(1) in _VALID_OS_MACROS:
1353 good = _DidYouMeanOSMacro(match.group(1))
1354 did_you_mean = ' (did you mean %s?)' % good if good else ''
1355 results.append(' %s:%d %s%s' % (f.LocalPath(),
1356 lnum,
1357 match.group(1),
1358 did_you_mean))
1359 return results
1362 def _CheckForInvalidOSMacros(input_api, output_api):
1363 """Check all affected files for invalid OS macros."""
1364 bad_macros = []
1365 for f in input_api.AffectedFiles():
1366 if not f.LocalPath().endswith(('.py', '.js', '.html', '.css')):
1367 bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))
1369 if not bad_macros:
1370 return []
1372 return [output_api.PresubmitError(
1373 'Possibly invalid OS macro[s] found. Please fix your code\n'
1374 'or add your macro to src/PRESUBMIT.py.', bad_macros)]
1377 def CheckChangeOnUpload(input_api, output_api):
1378 results = []
1379 results.extend(_CommonChecks(input_api, output_api))
1380 results.extend(_CheckJavaStyle(input_api, output_api))
1381 return results
1384 def GetTryServerMasterForBot(bot):
1385 """Returns the Try Server master for the given bot.
1387 It tries to guess the master from the bot name, but may still fail
1388 and return None. There is no longer a default master.
1389 """
1390 # Potentially ambiguous bot names are listed explicitly.
1391 master_map = {
1392 'linux_gpu': 'tryserver.chromium.gpu',
1393 'mac_gpu': 'tryserver.chromium.gpu',
1394 'win_gpu': 'tryserver.chromium.gpu',
1395 'chromium_presubmit': 'tryserver.chromium.linux',
1396 'blink_presubmit': 'tryserver.chromium.linux',
1397 'tools_build_presubmit': 'tryserver.chromium.linux',
1398 }
1399 master = master_map.get(bot)
1400 if not master:
1401 if 'gpu' in bot:
1402 master = 'tryserver.chromium.gpu'
1403 elif 'linux' in bot or 'android' in bot or 'presubmit' in bot:
1404 master = 'tryserver.chromium.linux'
1405 elif 'win' in bot:
1406 master = 'tryserver.chromium.win'
1407 elif 'mac' in bot or 'ios' in bot:
1408 master = 'tryserver.chromium.mac'
1409 return master
1412 def GetDefaultTryConfigs(bots=None):
1413 """Returns a list of ('bot', set(['tests']), optionally filtered by [bots].
1415 To add tests to this list, they MUST be in the corresponding master's
1416 gatekeeper config. For example, anything on master.chromium would be closed by
1417 tools/build/masters/master.chromium/master_gatekeeper_cfg.py.
1419 If 'bots' is specified, will only return configurations for bots in that list.
1420 """
1422 standard_tests = [
1423 'base_unittests',
1424 'browser_tests',
1425 'cacheinvalidation_unittests',
1426 'check_deps',
1427 'check_deps2git',
1428 'content_browsertests',
1429 'content_unittests',
1430 'crypto_unittests',
1431 'gpu_unittests',
1432 'interactive_ui_tests',
1433 'ipc_tests',
1434 'jingle_unittests',
1435 'media_unittests',
1436 'net_unittests',
1437 'ppapi_unittests',
1438 'printing_unittests',
1439 'sql_unittests',
1440 'sync_unit_tests',
1441 'unit_tests',
1442 # Broken in release.
1443 #'url_unittests',
1444 #'webkit_unit_tests',
1445 ]
1447 builders_and_tests = {
1448 # TODO(maruel): Figure out a way to run 'sizes' where people can
1449 # effectively update the perf expectation correctly. This requires a
1450 # clobber=True build running 'sizes'. 'sizes' is not accurate with
1451 # incremental build. Reference:
1452 # http://chromium.org/developers/tree-sheriffs/perf-sheriffs.
1453 # TODO(maruel): An option would be to run 'sizes' but not count a failure
1454 # of this step as a try job failure.
1455 'android_aosp': ['compile'],
1456 'android_chromium_gn_compile_rel': ['compile'],
1457 'android_clang_dbg': ['slave_steps'],
1458 'android_dbg_tests_recipe': ['slave_steps'],
1459 'cros_x86': ['defaulttests'],
1460 'ios_dbg_simulator': [
1461 'compile',
1462 'base_unittests',
1463 'content_unittests',
1464 'crypto_unittests',
1465 'url_unittests',
1466 'net_unittests',
1467 'sql_unittests',
1468 'ui_unittests',
1469 ],
1470 'ios_rel_device': ['compile'],
1471 'linux_asan': ['compile'],
1472 'mac_asan': ['compile'],
1473 #TODO(stip): Change the name of this builder to reflect that it's release.
1474 'linux_gtk': standard_tests,
1475 'linux_chromeos_asan': ['compile'],
1476 'linux_chromium_chromeos_clang_dbg': ['defaulttests'],
1477 'linux_chromium_chromeos_rel_swarming': ['defaulttests'],
1478 'linux_chromium_compile_dbg': ['defaulttests'],
1479 'linux_chromium_gn_rel': ['defaulttests'],
1480 'linux_chromium_rel_swarming': ['defaulttests'],
1481 'linux_chromium_clang_dbg': ['defaulttests'],
1482 'linux_gpu': ['defaulttests'],
1483 'linux_nacl_sdk_build': ['compile'],
1484 'mac_chromium_compile_dbg': ['defaulttests'],
1485 'mac_chromium_rel_swarming': ['defaulttests'],
1486 'mac_gpu': ['defaulttests'],
1487 'mac_nacl_sdk_build': ['compile'],
1488 'win_chromium_compile_dbg': ['defaulttests'],
1489 'win_chromium_dbg': ['defaulttests'],
1490 'win_chromium_rel_swarming': ['defaulttests'],
1491 'win_chromium_x64_rel_swarming': ['defaulttests'],
1492 'win_gpu': ['defaulttests'],
1493 'win_nacl_sdk_build': ['compile'],
1494 'win8_chromium_rel': ['defaulttests'],
1495 }
1497 if bots:
1498 filtered_builders_and_tests = dict((bot, set(builders_and_tests[bot]))
1499 for bot in bots)
1500 else:
1501 filtered_builders_and_tests = dict(
1502 (bot, set(tests))
1503 for bot, tests in builders_and_tests.iteritems())
1505 # Build up the mapping from tryserver master to bot/test.
1506 out = dict()
1507 for bot, tests in filtered_builders_and_tests.iteritems():
1508 out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
1509 return out
1512 def CheckChangeOnCommit(input_api, output_api):
1513 results = []
1514 results.extend(_CommonChecks(input_api, output_api))
1515 # TODO(thestig) temporarily disabled, doesn't work in third_party/
1516 #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
1517 # input_api, output_api, sources))
1518 # Make sure the tree is 'open'.
1519 results.extend(input_api.canned_checks.CheckTreeIsOpen(
1520 input_api,
1521 output_api,
1522 json_url='http://chromium-status.appspot.com/current?format=json'))
1524 results.extend(input_api.canned_checks.CheckChangeHasBugField(
1525 input_api, output_api))
1526 results.extend(input_api.canned_checks.CheckChangeHasDescription(
1527 input_api, output_api))
1528 return results
1531 def GetPreferredTryMasters(project, change):
1532 files = change.LocalPaths()
1534 if not files or all(re.search(r'[\\\/]OWNERS$', f) for f in files):
1535 return {}
1537 if all(re.search(r'\.(m|mm)$|(^|[\\\/_])mac[\\\/_.]', f) for f in files):
1538 return GetDefaultTryConfigs([
1539 'mac_chromium_compile_dbg',
1540 'mac_chromium_rel_swarming',
1541 ])
1542 if all(re.search('(^|[/_])win[/_.]', f) for f in files):
1543 return GetDefaultTryConfigs([
1544 'win_chromium_dbg',
1545 'win_chromium_rel_swarming',
1545 'win8_chromium_rel',
1546 ])
1548 if all(re.search(r'(^|[\\\/_])android[\\\/_.]', f) for f in files):
1549 return GetDefaultTryConfigs([
1550 'android_aosp',
1551 'android_clang_dbg',
1552 'android_dbg_tests_recipe',
1553 ])
1554 if all(re.search(r'[\\\/_]ios[\\\/_.]', f) for f in files):
1555 return GetDefaultTryConfigs(['ios_rel_device', 'ios_dbg_simulator'])
1557 builders = [
1558 'android_chromium_gn_compile_rel',
1559 'android_clang_dbg',
1560 'android_dbg_tests_recipe',
1561 'ios_dbg_simulator',
1562 'ios_rel_device',
1563 'linux_chromium_chromeos_rel_swarming',
1564 'linux_chromium_clang_dbg',
1565 'linux_chromium_gn_rel',
1566 'linux_chromium_rel_swarming',
1567 'linux_gpu',
1568 'mac_chromium_compile_dbg',
1569 'mac_chromium_rel_swarming',
1570 'mac_gpu',
1571 'win_chromium_compile_dbg',
1572 'win_chromium_rel_swarming',
1573 'win_chromium_x64_rel_swarming',
1574 'win_gpu',
1575 'win8_chromium_rel',
1576 ]
1578 # Match things like path/aura/file.cc and path/file_aura.cc.
1579 # Same for chromeos.
1580 if any(re.search(r'[\\\/_](aura|chromeos)', f) for f in files):
1581 builders.extend([
1582 'linux_chromeos_asan',
1583 'linux_chromium_chromeos_clang_dbg'
1584 ])
1586 # If there are gyp changes to base, build, or chromeos, run a full cros build
1587 # in addition to the shorter linux_chromeos build. Changes to high level gyp
1588 # files have a much higher chance of breaking the cros build, which is
1589 # different from the linux_chromeos build that most chrome developers test
1590 # with.
1591 if any(re.search('^(base|build|chromeos).*\.gypi?$', f) for f in files):
1592 builders.extend(['cros_x86'])
1594 # The AOSP bot doesn't build the chrome/ layer, so ignore any changes to it
1595 # unless they're .gyp(i) files as changes to those files can break the gyp
1596 # step on that bot.
1597 if (not all(re.search('^chrome', f) for f in files) or
1598 any(re.search('\.gypi?$', f) for f in files)):
1599 builders.extend(['android_aosp'])
1601 return GetDefaultTryConfigs(builders)