# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Top-level presubmit script for Chromium.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""

import re
import sys

_EXCLUDED_PATHS = (
    r"^breakpad[\\\/].*",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
    r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
    r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
    r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
    r"^skia[\\\/].*",
    r"^v8[\\\/].*",
    r".*MakeFile$",
    r".+_autogen\.h$",
    r".+[\\\/]pnacl_shim\.c$",
    r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
    r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js",
)

# The TestRunner and NetscapePlugIn libraries are temporarily excluded from
# pan-project checks until they are transitioned to the Chromium coding style.
_TESTRUNNER_PATHS = (
    r"^content[\\\/]shell[\\\/]renderer[\\\/]test_runner[\\\/].*",
    r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
)

# Fragment of a regular expression that matches C++ and Objective-C++
# implementation files.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'

# Regular expression that matches code only used for test binaries
# (best effort).
_TEST_CODE_EXCLUDED_PATHS = (
    r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
        _IMPLEMENTATION_EXTENSIONS,
    r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
    r'.*[\\\/](test|tool(s)?)[\\\/].*',
    # content_shell is used for running layout tests.
    r'content[\\\/]shell[\\\/].*',
    # At request of folks maintaining this folder.
    r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
    # Non-production example code.
    r'mojo[\\\/]examples[\\\/].*',
    # Launcher for running iOS tests on the simulator.
    r'testing[\\\/]iossim[\\\/]iossim\.mm$',
)

_TEST_ONLY_WARNING = (
    'You might be calling functions intended only for testing from\n'
    'production code. It is OK to ignore this warning if you know what\n'
    'you are doing, as the heuristics used to detect the situation are\n'
    'not perfect. The commit queue will not block on this warning.')


_INCLUDE_ORDER_WARNING = (
    'Your #include order seems to be broken. Send mail to\n'
    'marja@chromium.org if this is not the case.')

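# Note on the banned-function tables below (descriptive summary, derived from
# how _CheckNoBannedFunctions unpacks them): each _BANNED_OBJC_FUNCTIONS entry
# is a (function_name_or_regex, message_lines, treat_as_error) tuple, and each
# _BANNED_CPP_FUNCTIONS entry adds a fourth element listing path regexes that
# are excluded from the check. A name starting with '/' is treated as a
# regular expression rather than a plain substring match.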
_BANNED_OBJC_FUNCTIONS = (
    (
      'addTrackingRect:',
      (
       'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
       ' prohibited. Please use CrTrackingArea instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      r'/NSTrackingArea\W',
      (
       'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
       'instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      False,
    ),
    (
      'convertPointFromBase:',
      (
       'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertPointToBase:',
      (
       'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
       'Please use |convertPoint:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectFromBase:',
      (
       'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertRectToBase:',
      (
       'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
       'Please use |convertRect:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeFromBase:',
      (
       'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) fromView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
    (
      'convertSizeToBase:',
      (
       'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
       'Please use |convertSize:(point) toView:nil| instead.',
       'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
      ),
      True,
    ),
)

_BANNED_CPP_FUNCTIONS = (
    # Make sure that gtest's FRIEND_TEST() macro is not used; the
    # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
    # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
    (
      'FRIEND_TEST(',
      (
       'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
       'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
      ),
      False,
      (),
    ),
    (
      'ScopedAllowIO',
      (
       'New code should not use ScopedAllowIO. Post a task to the blocking',
       'pool or the FILE thread instead.',
      ),
      True,
      (
        r"^base[\\\/]process[\\\/]process_metrics_linux\.cc$",
        r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_loader\.cc$",
        r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
        r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
        r"^mojo[\\\/]system[\\\/]raw_shared_buffer_posix\.cc$",
        r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
        r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
      ),
    ),
    (
      'SkRefPtr',
      (
        'The use of SkRefPtr is prohibited. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoRef',
      (
        'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoTUnref',
      (
        'The use of SkAutoTUnref is dangerous because it implicitly ',
        'converts to a raw pointer. Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      'SkAutoUnref',
      (
        'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
        'because it implicitly converts to a raw pointer. ',
        'Please use skia::RefPtr instead.'
      ),
      True,
      (),
    ),
    (
      r'/HANDLE_EINTR\(.*close',
      (
       'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
       'descriptor will be closed, and it is incorrect to retry the close.',
       'Either call close directly and ignore its return value, or wrap close',
       'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
      ),
      True,
      (),
    ),
    (
      r'/IGNORE_EINTR\((?!.*close)',
      (
       'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
       'calls, use HANDLE_EINTR. See http://crbug.com/269623',
      ),
      True,
      (
        # Files that #define IGNORE_EINTR.
        r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
        r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
      ),
    ),
    (
      r'/v8::Extension\(',
      (
        'Do not introduce new v8::Extensions into the code base, use',
        'gin::Wrappable instead. See http://crbug.com/334679',
      ),
      True,
      (
        r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
      ),
    ),
)

_IPC_ENUM_TRAITS_DEPRECATED = (
    'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
    'See http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc')

_VALID_OS_MACROS = (
    # Please keep sorted.
    'OS_ANDROID',
    'OS_ANDROID_HOST',
    'OS_BSD',
    'OS_CAT',  # For testing.
    'OS_CHROMEOS',
    'OS_FREEBSD',
    'OS_IOS',
    'OS_LINUX',
    'OS_MACOSX',
    'OS_NACL',
    'OS_OPENBSD',
    'OS_POSIX',
    'OS_QNX',
    'OS_SOLARIS',
    'OS_WIN',
)

def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
  """Attempts to prevent use of functions intended only for testing in
  non-testing code. For now this is just a best-effort implementation
  that ignores header files and may have some false positives. A
  better implementation would probably need a proper C++ parser.
  """
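  # Illustrative example of what the patterns below are meant to catch: a
  # changed line in a .cc file such as
  #   foo->SetCountForTesting(5);
  # would be reported, while a comment mentioning ForTesting() or the
  # definition of a ...ForTesting() method itself would not.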
  # We only scan .cc files and the like, as the declarations of for-testing
  # functions in header files are hard to distinguish from calls to such
  # functions without a proper C++ parser.
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS

  base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
  inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
  exclusion_pattern = input_api.re.compile(
      r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
          base_function_pattern, base_function_pattern))

  def FilterFile(affected_file):
    black_list = (_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(file_inclusion_pattern, ),
        black_list=black_list)

  problems = []
  for f in input_api.AffectedSourceFiles(FilterFile):
    local_path = f.LocalPath()
    for line_number, line in f.ChangedContents():
      if (inclusion_pattern.search(line) and
          not comment_pattern.search(line) and
          not exclusion_pattern.search(line)):
        problems.append(
            '%s:%d\n %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
  else:
    return []

def _CheckNoIOStreamInHeaders(input_api, output_api):
  """Checks to make sure no .h files include <iostream>."""
  files = []
  pattern = input_api.re.compile(r'^#include\s*<iostream>',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:
    return [output_api.PresubmitError(
        'Do not #include <iostream> in header files, since it inserts static '
        'initialization into every file including the header. Instead, '
        '#include <ostream>. See http://crbug.com/94794',
        files)]
  return []

def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
  """Checks to make sure no source files use UNIT_TEST."""
  problems = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.mm')):
      continue

    for line_num, line in f.ChangedContents():
      if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
        problems.append(' %s:%d' % (f.LocalPath(), line_num))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
      '\n'.join(problems))]

def _CheckNoNewWStrings(input_api, output_api):
  """Checks to make sure we don't introduce use of wstrings."""
  problems = []
  for f in input_api.AffectedFiles():
    if (not f.LocalPath().endswith(('.cc', '.h')) or
        f.LocalPath().endswith(('test.cc', '_win.cc', '_win.h'))):
      continue

    allowWString = False
    for line_num, line in f.ChangedContents():
      if 'presubmit: allow wstring' in line:
        allowWString = True
      elif not allowWString and 'wstring' in line:
        problems.append(' %s:%d' % (f.LocalPath(), line_num))
        allowWString = False
      else:
        allowWString = False

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
      ' If you are calling a cross-platform API that accepts a wstring, '
      'fix the API.\n' +
      '\n'.join(problems))]

def _CheckNoDEPSGIT(input_api, output_api):
  """Make sure .DEPS.git is never modified manually."""
  if any(f.LocalPath().endswith('.DEPS.git') for f in
      input_api.AffectedFiles()):
    return [output_api.PresubmitError(
        'Never commit changes to .DEPS.git. This file is maintained by an\n'
        'automated system based on what\'s in DEPS and your changes will be\n'
        'overwritten.\n'
        'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
        'for more information')]
  return []

def _CheckNoBannedFunctions(input_api, output_api):
  """Make sure that banned functions are not used."""
  warnings = []
  errors = []

  file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
        matched = False
        if func_name[0:1] == '/':
          regex = func_name[1:]
          if input_api.re.search(regex, line):
            matched = True
        elif func_name in line:
          matched = True
        if matched:
          problems = warnings
          if error:
            problems = errors
          problems.append(' %s:%d:' % (f.LocalPath(), line_num))
          for message_line in message:
            problems.append(' %s' % message_line)

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
        def IsBlacklisted(affected_file, blacklist):
          local_path = affected_file.LocalPath()
          for item in blacklist:
            if input_api.re.match(item, local_path):
              return True
          return False
        if IsBlacklisted(f, excluded_paths):
          continue
        matched = False
        if func_name[0:1] == '/':
          regex = func_name[1:]
          if input_api.re.search(regex, line):
            matched = True
        elif func_name in line:
          matched = True
        if matched:
          problems = warnings
          if error:
            problems = errors
          problems.append(' %s:%d:' % (f.LocalPath(), line_num))
          for message_line in message:
            problems.append(' %s' % message_line)

  result = []
  if warnings:
    result.append(output_api.PresubmitPromptWarning(
        'Banned functions were used.\n' + '\n'.join(warnings)))
  if errors:
    result.append(output_api.PresubmitError(
        'Banned functions were used.\n' + '\n'.join(errors)))
  return result

def _CheckNoPragmaOnce(input_api, output_api):
  """Make sure that #pragma once is not used in header files."""
  files = []
  pattern = input_api.re.compile(r'^#pragma\s+once',
                                 input_api.re.MULTILINE)
  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:
    return [output_api.PresubmitError(
        'Do not use #pragma once in header files.\n'
        'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
        files)]
  return []

def _CheckNoTrinaryTrueFalse(input_api, output_api):
  """Checks to make sure we don't introduce use of foo ? true : false."""
  problems = []
  pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue

    for line_num, line in f.ChangedContents():
      if pattern.match(line):
        problems.append(' %s:%d' % (f.LocalPath(), line_num))

  if not problems:
    return []
  return [output_api.PresubmitPromptWarning(
      'Please consider avoiding the "? true : false" pattern if possible.\n' +
      '\n'.join(problems))]

def _CheckUnwantedDependencies(input_api, output_api):
  """Runs checkdeps on #include statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
    import checkdeps
    from cpp_checker import CppChecker
    from rules import Rule
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  added_includes = []
  for f in input_api.AffectedFiles():
    if not CppChecker.IsCppFile(f.LocalPath()):
      continue

    changed_lines = [line for line_num, line in f.ChangedContents()]
    added_includes.append([f.LocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  error_descriptions = []
  warning_descriptions = []
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
      added_includes):
    description_with_path = '%s\n %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
    else:
      warning_descriptions.append(description_with_path)

  results = []
  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more #includes that violate checkdeps rules.',
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more #includes of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '#include? See relevant DEPS file(s) for details and contacts.',
        warning_descriptions))
  return results

def _CheckFilePermissions(input_api, output_api):
  """Check that all files have their permissions properly set."""
  if input_api.platform == 'win32':
    return []
  args = [sys.executable, 'tools/checkperms/checkperms.py', '--root',
          input_api.change.RepositoryRoot()]
  for f in input_api.AffectedFiles():
    args += ['--file', f.LocalPath()]
  checkperms = input_api.subprocess.Popen(args,
                                          stdout=input_api.subprocess.PIPE)
  errors = checkperms.communicate()[0].strip()
  if errors:
    return [output_api.PresubmitError('checkperms.py failed.',
                                      errors.splitlines())]
  return []

def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
  """Makes sure we don't include ui/aura/window_property.h
  in header files.
  """
  pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
  errors = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith('.h'):
      continue
    for line_num, line in f.ChangedContents():
      if pattern.match(line):
        errors.append(' %s:%d' % (f.LocalPath(), line_num))

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'Header files should not include ui/aura/window_property.h', errors))
  return results

def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
  """Checks that the lines in scope occur in the right order.

  1. C system files in alphabetical order
  2. C++ system files in alphabetical order
  3. Project's .h files
  """
  c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
  cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
  custom_include_pattern = input_api.re.compile(r'\s*#include ".*')

  C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)

  state = C_SYSTEM_INCLUDES

  previous_line = ''
  previous_line_num = 0
  problem_linenums = []
  for line_num, line in scope:
    if c_system_include_pattern.match(line):
      if state != C_SYSTEM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif cpp_system_include_pattern.match(line):
      if state == C_SYSTEM_INCLUDES:
        state = CPP_SYSTEM_INCLUDES
      elif state == CUSTOM_INCLUDES:
        problem_linenums.append((line_num, previous_line_num))
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    elif custom_include_pattern.match(line):
      if state != CUSTOM_INCLUDES:
        state = CUSTOM_INCLUDES
      elif previous_line and previous_line > line:
        problem_linenums.append((line_num, previous_line_num))
    else:
      problem_linenums.append(line_num)
    previous_line = line
    previous_line_num = line_num

  warnings = []
  for (line_num, previous_line_num) in problem_linenums:
    if line_num in changed_linenums or previous_line_num in changed_linenums:
      warnings.append(' %s:%d' % (file_path, line_num))
  return warnings

def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
  """Checks the #include order for the given file f."""

  system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
  # Exclude the following includes from the check:
  # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
  #    specific order.
  # 2) <atlbase.h>, "build/build_config.h"
  excluded_include_pattern = input_api.re.compile(
      r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
  custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
  # Match the final or penultimate token if it is xxxtest so we can ignore it
  # when considering the special first include.
  test_file_tag_pattern = input_api.re.compile(
      r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
  if_pattern = input_api.re.compile(
      r'\s*#\s*(if|elif|else|endif|define|undef).*')
  # Some files need specialized order of includes; exclude such files from this
  # check.
  uncheckable_includes_pattern = input_api.re.compile(
      r'\s*#include '
      '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')

  contents = f.NewContents()
  warnings = []
  line_num = 0

  # Handle the special first include. If the first include file is
  # some/path/file.h, the corresponding including file can be some/path/file.cc,
  # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
  # etc. It's also possible that no special first include exists.
  # If the included file is some/path/file_platform.h the including file could
  # also be some/path/file_xxxtest_platform.h.
  including_file_base_name = test_file_tag_pattern.sub(
      '', input_api.os_path.basename(f.LocalPath()))

  for line in contents:
    line_num += 1
    if system_include_pattern.match(line):
      # No special first include -> process the line again along with normal
      # includes.
      line_num -= 1
      break
    match = custom_include_pattern.match(line)
    if match:
      match_dict = match.groupdict()
      header_basename = test_file_tag_pattern.sub(
          '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')

      if header_basename not in including_file_base_name:
        # No special first include -> process the line again along with normal
        # includes.
        line_num -= 1
      break

  # Split into scopes: Each region between #if and #endif is its own scope.
  scopes = []
  current_scope = []
  for line in contents[line_num:]:
    line_num += 1
    if uncheckable_includes_pattern.match(line):
      continue
    if if_pattern.match(line):
      scopes.append(current_scope)
      current_scope = []
    elif ((system_include_pattern.match(line) or
           custom_include_pattern.match(line)) and
          not excluded_include_pattern.match(line)):
      current_scope.append((line_num, line))
  scopes.append(current_scope)

  for scope in scopes:
    warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
                                               changed_linenums))
  return warnings

def _CheckIncludeOrder(input_api, output_api):
  """Checks that the #include order is correct.

  1. The corresponding header for source files.
  2. C system files in alphabetical order
  3. C++ system files in alphabetical order
  4. Project's .h files in alphabetical order

  Each region separated by #if, #elif, #else, #endif, #define and #undef follows
  these rules separately.
  """
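  # Illustrative example of the expected order for a file foo/bar/baz.cc
  # (file and header names below are assumed, for illustration only):
  #   #include "foo/bar/baz.h"     <- special first include, if one exists
  #   #include <stddef.h>          <- C system includes, alphabetical
  #   #include <string>            <- C++ system includes, alphabetical
  #   #include "base/logging.h"    <- project includes, alphabetical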
  def FileFilterIncludeOrder(affected_file):
    black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(affected_file, black_list=black_list)

  warnings = []
  for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
    if f.LocalPath().endswith(('.cc', '.h')):
      changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
      warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))

  results = []
  if warnings:
    results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
                                                      warnings))
  return results

def _CheckForVersionControlConflictsInFile(input_api, f):
  pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  errors = []
  for line_num, line in f.ChangedContents():
    if pattern.match(line):
      errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
  return errors

def _CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  errors = []
  for f in input_api.AffectedFiles():
    errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'Version control conflict markers found, please resolve.', errors))
  return results

def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
  def FilterFile(affected_file):
    """Filter function for use with input_api.AffectedSourceFiles,
    below. This filters out everything except non-test files from
    top-level directories that generally speaking should not hard-code
    service URLs (e.g. src/android_webview/, src/content/ and others).
    """
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
        black_list=(_EXCLUDED_PATHS +
                    _TEST_CODE_EXCLUDED_PATHS +
                    input_api.DEFAULT_BLACK_LIST))

  base_pattern = '"[^"]*google\.com[^"]*"'
  comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
  pattern = input_api.re.compile(base_pattern)
  problems = []  # items are (filename, line_number, line)
  for f in input_api.AffectedSourceFiles(FilterFile):
    for line_num, line in f.ChangedContents():
      if not comment_pattern.search(line) and pattern.search(line):
        problems.append((f.LocalPath(), line_num, line))

  if problems:
    return [output_api.PresubmitPromptOrNotify(
        'Most layers below src/chrome/ should not hardcode service URLs.\n'
        'Are you sure this is correct?',
        [' %s:%d: %s' % (
            problem[0], problem[1], problem[2]) for problem in problems])]
  else:
    return []

def _CheckNoAbbreviationInPngFileName(input_api, output_api):
  """Makes sure there are no abbreviations in the name of PNG files.
  """
  pattern = input_api.re.compile(r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$')
  errors = []
  for f in input_api.AffectedFiles(include_deletes=False):
    if pattern.match(f.LocalPath()):
      errors.append(' %s' % f.LocalPath())

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'The name of PNG files should not have abbreviations. \n'
        'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
        'Contact oshima@chromium.org if you have questions.', errors))
  return results

def _FilesToCheckForIncomingDeps(re, changed_lines):
  """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
  a set of DEPS entries that we should look up.

  For a directory (rather than a specific filename) we fake a path to
  a specific filename by adding /DEPS. This is chosen as a file that
  will seldom or never be subject to per-file include_rules.
  """
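  # For example (illustrative): a changed DEPS line such as
  #   "+content/public/browser/web_contents.h",
  # maps to content/public/browser/web_contents.h, while a directory rule like
  #   "+content/public/browser",
  # maps to content/public/browser/DEPS.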
  # We ignore deps entries on auto-generated directories.
  AUTO_GENERATED_DIRS = ['grit', 'jni']

  # This pattern grabs the path without basename in the first
  # parentheses, and the basename (if present) in the second. It
  # relies on the simple heuristic that if there is a basename it will
  # be a header file ending in ".h".
  pattern = re.compile(
      r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
  results = set()
  for changed_line in changed_lines:
    m = pattern.match(changed_line)
    if m:
      path = m.group(1)
      if path.split('/')[0] not in AUTO_GENERATED_DIRS:
        if m.group(2):
          results.add('%s%s' % (path, m.group(2)))
        else:
          results.add('%s/DEPS' % path)
  return results

def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
  want to make sure that the change is reviewed by an OWNER of the
  target file or directory, to avoid layering violations from being
  introduced. This check verifies that this happens.
  """
  changed_lines = set()
  for f in input_api.AffectedFiles():
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      changed_lines |= set(line.strip()
                           for line_num, line
                           in f.ChangedContents())
  if not changed_lines:
    return []

  virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
                                                           changed_lines)
  if not virtual_depended_on_files:
    return []

  if input_api.is_committing:
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check for DEPS additions')]
    if not input_api.change.issue:
      return [output_api.PresubmitError(
          "DEPS approval by OWNERS check failed: this change has "
          "no Rietveld issue number, so we can't check it for approvals.")]
    output = output_api.PresubmitError
  else:
    output = output_api.PresubmitNotifyResult

  owners_db = input_api.owners_db
  owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
      input_api,
      owners_db.email_regexp,
      approval_needed=input_api.is_committing)

  owner_email = owner_email or input_api.change.author_email

  reviewers_plus_owner = set(reviewers)
  if owner_email:
    reviewers_plus_owner.add(owner_email)
  missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
                                                 reviewers_plus_owner)

  # We strip the /DEPS part that was added by
  # _FilesToCheckForIncomingDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    else:
      return path
  unapproved_dependencies = ["'+%s'," % StripDeps(path)
                             for path in missing_files]

  if unapproved_dependencies:
    output_list = [
        output('Missing LGTM from OWNERS of dependencies added to DEPS:\n %s' %
               '\n '.join(sorted(unapproved_dependencies)))]
    if not input_api.is_committing:
      suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
      output_list.append(output(
          'Suggested missing target path OWNERS:\n %s' %
          '\n '.join(suggested_owners or [])))
    return output_list

  return []

def _CheckSpamLogging(input_api, output_api):
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^base[\\\/]logging\.h$",
                 r"^base[\\\/]logging\.cc$",
                 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
                 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
                 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
                     r"startup_browser_creator\.cc$",
                 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
                 r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
                     r"diagnostics_writer\.cc$",
                 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
                 r"^chromecast[\\\/]",
                 r"^cloud_print[\\\/]",
                 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
                     r"gl_helper_benchmark\.cc$",
                 r"^courgette[\\\/]courgette_tool\.cc$",
                 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
                 r"^native_client_sdk[\\\/]",
                 r"^remoting[\\\/]base[\\\/]logging\.h$",
                 r"^remoting[\\\/]host[\\\/].*",
                 r"^sandbox[\\\/]linux[\\\/].*",
                 r"^tools[\\\/]",
                 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",
                 r"^webkit[\\\/]browser[\\\/]fileapi[\\\/]" +
                     r"dump_file_system.cc$",))
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(file_inclusion_pattern,), black_list=black_list)

  log_info = []
  printf = []

  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    if re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
      log_info.append(f.LocalPath())
    elif re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
      log_info.append(f.LocalPath())

    if re.search(r"\bprintf\(", contents):
      printf.append(f.LocalPath())
    elif re.search(r"\bfprintf\((stdout|stderr)", contents):
      printf.append(f.LocalPath())

  if log_info:
    return [output_api.PresubmitError(
        'These files spam the console log with LOG(INFO):',
        items=log_info)]
  if printf:
    return [output_api.PresubmitError(
        'These files spam the console log with printf/fprintf:',
        items=printf)]
  return []

def _CheckForAnonymousVariables(input_api, output_api):
  """These types are all expected to hold locks while in scope and
  so should never be anonymous (which causes them to be immediately
  destroyed)."""
  they_who_must_be_named = [
    'base::AutoLock',
    'base::AutoReset',
    'base::AutoUnlock',
    'SkAutoAlphaRestore',
    'SkAutoBitmapShaderInstall',
    'SkAutoBlitterChoose',
    'SkAutoBounderCommit',
    'SkAutoCallProc',
    'SkAutoCanvasRestore',
    'SkAutoCommentBlock',
    'SkAutoDescriptor',
    'SkAutoDisableDirectionCheck',
    'SkAutoDisableOvalCheck',
    'SkAutoFree',
    'SkAutoGlyphCache',
    'SkAutoHDC',
    'SkAutoLockColors',
    'SkAutoLockPixels',
    'SkAutoMalloc',
    'SkAutoMaskFreeImage',
    'SkAutoMutexAcquire',
    'SkAutoPathBoundsUpdate',
    'SkAutoPDFRelease',
    'SkAutoRasterClipValidate',
    'SkAutoRef',
    'SkAutoTime',
    'SkAutoTrace',
    'SkAutoUnref',
  ]
  anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
  # bad: base::AutoLock(lock.get());
  # not bad: base::AutoLock lock(lock.get());
  bad_pattern = input_api.re.compile(anonymous)
  # good: new base::AutoLock(lock.get())
  good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
  errors = []

  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
      continue
    for linenum, line in f.ChangedContents():
      if bad_pattern.search(line) and not good_pattern.search(line):
        errors.append('%s:%d' % (f.LocalPath(), linenum))

  if errors:
    return [output_api.PresubmitError(
        'These lines create anonymous variables that need to be named:',
        items=errors)]
  return []

def _CheckCygwinShell(input_api, output_api):
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(r'.+\.(gyp|gypi)$',))
  cygwin_shell = []

  for f in input_api.AffectedSourceFiles(source_file_filter):
    for linenum, line in f.ChangedContents():
      if 'msvs_cygwin_shell' in line:
        cygwin_shell.append(f.LocalPath())
        break

  if cygwin_shell:
    return [output_api.PresubmitError(
        'These files should not use msvs_cygwin_shell (the default is 0):',
        items=cygwin_shell)]
  return []

def _CheckUserActionUpdate(input_api, output_api):
  """Checks if any new user action has been added."""
  if any('actions.xml' == input_api.os_path.basename(f) for f in
         input_api.LocalPaths()):
    # If actions.xml is already included in the changelist, the PRESUBMIT
    # for actions.xml will do a more complete presubmit check.
    return []

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
  action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
  current_actions = None
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      match = input_api.re.search(action_re, line)
      if match:
        # Loads contents in tools/metrics/actions/actions.xml to memory. It's
        # loaded only once.
        if not current_actions:
          with open('tools/metrics/actions/actions.xml') as actions_f:
            current_actions = actions_f.read()
        # Search for the matched user action name in |current_actions|.
        for action_name in match.groups():
          action = 'name="{0}"'.format(action_name)
          if action not in current_actions:
            return [output_api.PresubmitPromptWarning(
                'File %s line %d: %s is missing in '
                'tools/metrics/actions/actions.xml. Please run '
                'tools/metrics/actions/extract_actions.py to update.'
                % (f.LocalPath(), line_num, action_name))]
  return []

def _GetJSONParseError(input_api, filename, eat_comments=True):
  try:
    contents = input_api.ReadFile(filename)
    if eat_comments:
      json_comment_eater = input_api.os_path.join(
          input_api.PresubmitLocalPath(),
          'tools', 'json_comment_eater', 'json_comment_eater.py')
      process = input_api.subprocess.Popen(
          [input_api.python_executable, json_comment_eater],
          stdin=input_api.subprocess.PIPE,
          stdout=input_api.subprocess.PIPE,
          universal_newlines=True)
      (contents, _) = process.communicate(input=contents)

    input_api.json.loads(contents)
  except ValueError as e:
    return e
  return None

def _GetIDLParseError(input_api, filename):
  try:
    contents = input_api.ReadFile(filename)
    idl_schema = input_api.os_path.join(
        input_api.PresubmitLocalPath(),
        'tools', 'json_schema_compiler', 'idl_schema.py')
    process = input_api.subprocess.Popen(
        [input_api.python_executable, idl_schema],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    (_, error) = process.communicate(input=contents)
    return error or None
  except ValueError as e:
    return e

def _CheckParseErrors(input_api, output_api):
  """Check that IDL and JSON files do not contain syntax errors."""
  actions = {
    '.idl': _GetIDLParseError,
    '.json': _GetJSONParseError,
  }
  # These paths contain test data and other known invalid JSON files.
  excluded_patterns = [
    r'test[\\\/]data[\\\/]',
    r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
  ]
  # Most JSON files are preprocessed and support comments, but these do not.
  json_no_comments_patterns = [
    r'^testing[\\\/]',
  ]
  # Only run IDL checker on files in these directories.
  idl_included_patterns = [
    r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
    r'^extensions[\\\/]common[\\\/]api[\\\/]',
  ]

  def get_action(affected_file):
    filename = affected_file.LocalPath()
    return actions.get(input_api.os_path.splitext(filename)[1])

  def MatchesFile(patterns, path):
    for pattern in patterns:
      if input_api.re.search(pattern, path):
        return True
    return False

  def FilterFile(affected_file):
    action = get_action(affected_file)
    if not action:
      return False
    path = affected_file.LocalPath()

    if MatchesFile(excluded_patterns, path):
      return False

    if (action == _GetIDLParseError and
        not MatchesFile(idl_included_patterns, path)):
      return False
    return True

  results = []
  for affected_file in input_api.AffectedFiles(
      file_filter=FilterFile, include_deletes=False):
    action = get_action(affected_file)
    kwargs = {}
    if (action == _GetJSONParseError and
        MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
      kwargs['eat_comments'] = False
    parse_error = action(input_api,
                         affected_file.AbsoluteLocalPath(),
                         **kwargs)
    if parse_error:
      results.append(output_api.PresubmitError('%s could not be parsed: %s' %
          (affected_file.LocalPath(), parse_error)))
  return results

def _CheckJavaStyle(input_api, output_api):
  """Runs checkstyle on changed java files and returns errors if any exist."""
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
    import checkstyle
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  return checkstyle.RunCheckstyle(
      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml')

_DEPRECATED_CSS = [
  # Values
  ( "-webkit-box", "flex" ),
  ( "-webkit-inline-box", "inline-flex" ),
  ( "-webkit-flex", "flex" ),
  ( "-webkit-inline-flex", "inline-flex" ),
  ( "-webkit-min-content", "min-content" ),
  ( "-webkit-max-content", "max-content" ),

  # Properties
  ( "-webkit-background-clip", "background-clip" ),
  ( "-webkit-background-origin", "background-origin" ),
  ( "-webkit-background-size", "background-size" ),
  ( "-webkit-box-shadow", "box-shadow" ),

  # Functions
  ( "-webkit-gradient", "gradient" ),
  ( "-webkit-repeating-gradient", "repeating-gradient" ),
  ( "-webkit-linear-gradient", "linear-gradient" ),
  ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
  ( "-webkit-radial-gradient", "radial-gradient" ),
  ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
]

def _CheckNoDeprecatedCSS(input_api, output_api):
  """Make sure that we don't use deprecated CSS properties, functions or
  values. Our external documentation is ignored by the hooks as it needs to
  be consumed by WebKit."""
  results = []
  file_inclusion_pattern = (r".+\.css$")
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^chrome/common/extensions/docs",
                 r"^chrome/docs",
                 r"^native_client_sdk"))
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=black_list)
  for fpath in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in fpath.ChangedContents():
      for (deprecated_value, value) in _DEPRECATED_CSS:
        if input_api.re.search(deprecated_value, line):
          results.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated CSS %s, use %s instead" %
              (fpath.LocalPath(), line_num, deprecated_value, value)))
  return results

def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  results = []
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api,
      excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
  results.extend(_CheckAuthorizedAuthor(input_api, output_api))
  results.extend(
      _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
  results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
  results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
  results.extend(_CheckNoNewWStrings(input_api, output_api))
  results.extend(_CheckNoDEPSGIT(input_api, output_api))
  results.extend(_CheckNoBannedFunctions(input_api, output_api))
  results.extend(_CheckNoPragmaOnce(input_api, output_api))
  results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
  results.extend(_CheckUnwantedDependencies(input_api, output_api))
  results.extend(_CheckFilePermissions(input_api, output_api))
  results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
  results.extend(_CheckIncludeOrder(input_api, output_api))
  results.extend(_CheckForVersionControlConflicts(input_api, output_api))
  results.extend(_CheckPatchFiles(input_api, output_api))
  results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
  results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
  results.extend(_CheckForInvalidOSMacros(input_api, output_api))
  results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
  results.extend(
      input_api.canned_checks.CheckChangeHasNoTabs(
          input_api,
          output_api,
          source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
  results.extend(_CheckSpamLogging(input_api, output_api))
  results.extend(_CheckForAnonymousVariables(input_api, output_api))
  results.extend(_CheckCygwinShell(input_api, output_api))
  results.extend(_CheckUserActionUpdate(input_api, output_api))
  results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
  results.extend(_CheckParseErrors(input_api, output_api))
  results.extend(_CheckForIPCRules(input_api, output_api))

  if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
    results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
        input_api, output_api,
        input_api.PresubmitLocalPath(),
        whitelist=[r'^PRESUBMIT_test\.py$']))
  return results

def _CheckAuthorizedAuthor(input_api, output_api):
  """For non-Googler/Chromite committers, verify the author's email address is
  in AUTHORS.
  """
  # TODO(maruel): Add it to input_api?
  import fnmatch

  author = input_api.change.author_email
  if not author:
    input_api.logging.info('No author, skipping AUTHOR check')
    return []
  authors_path = input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'AUTHORS')
  valid_authors = (
      input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
      for line in open(authors_path))
  valid_authors = [item.group(1).lower() for item in valid_authors if item]
  if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
    input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
    return [output_api.PresubmitPromptWarning(
        ('%s is not in AUTHORS file. If you are a new contributor, please visit'
        '\n'
        'http://www.chromium.org/developers/contributing-code and read the '
        '"Legal" section\n'
        'If you are a chromite, verify the contributor signed the CLA.') %
        author)]
  return []

def _CheckPatchFiles(input_api, output_api):
  problems = [f.LocalPath() for f in input_api.AffectedFiles()
      if f.LocalPath().endswith(('.orig', '.rej'))]
  if problems:
    return [output_api.PresubmitError(
        "Don't commit .rej and .orig files.", problems)]
  else:
    return []

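# _DidYouMeanOSMacro below maps the fourth character of a misspelled OS_*
# macro to the most likely intended macro, e.g. (illustrative)
# 'OS_MAC' -> 'OS_MACOSX' and 'OS_WINDOWS' -> 'OS_WIN'.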
def _DidYouMeanOSMacro(bad_macro):
  try:
    return {'A': 'OS_ANDROID',
            'B': 'OS_BSD',
            'C': 'OS_CHROMEOS',
            'F': 'OS_FREEBSD',
            'L': 'OS_LINUX',
            'M': 'OS_MACOSX',
            'N': 'OS_NACL',
            'O': 'OS_OPENBSD',
            'P': 'OS_POSIX',
            'S': 'OS_SOLARIS',
            'W': 'OS_WIN'}[bad_macro[3].upper()]
  except KeyError:
    return ''

def _CheckForInvalidOSMacrosInFile(input_api, f):
  """Check for sensible looking, totally invalid OS macros."""
  preprocessor_statement = input_api.re.compile(r'^\s*#')
  os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
  results = []
  for lnum, line in f.ChangedContents():
    if preprocessor_statement.search(line):
      for match in os_macro.finditer(line):
        if not match.group(1) in _VALID_OS_MACROS:
          good = _DidYouMeanOSMacro(match.group(1))
          did_you_mean = ' (did you mean %s?)' % good if good else ''
          results.append(' %s:%d %s%s' % (f.LocalPath(),
                                          lnum,
                                          match.group(1),
                                          did_you_mean))
  return results

def _CheckForInvalidOSMacros(input_api, output_api):
  """Check all affected files for invalid OS macros."""
  bad_macros = []
  for f in input_api.AffectedFiles():
    if not f.LocalPath().endswith(('.py', '.js', '.html', '.css')):
      bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))

  if not bad_macros:
    return []

  return [output_api.PresubmitError(
      'Possibly invalid OS macro[s] found. Please fix your code\n'
      'or add your macro to src/PRESUBMIT.py.', bad_macros)]

def _CheckForIPCRules(input_api, output_api):
  """Check for the same IPC rules described in
  http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
  """
  base_pattern = r'IPC_ENUM_TRAITS\('
  inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)

  problems = []
  for f in input_api.AffectedSourceFiles(None):
    local_path = f.LocalPath()
    if not local_path.endswith('.h'):
      continue
    for line_number, line in f.ChangedContents():
      if inclusion_pattern.search(line) and not comment_pattern.search(line):
        problems.append(
            '%s:%d\n %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptWarning(
        _IPC_ENUM_TRAITS_DEPRECATED, problems)]
  else:
    return []

def CheckChangeOnUpload(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  results.extend(_CheckJavaStyle(input_api, output_api))
  return results

def GetTryServerMasterForBot(bot):
  """Returns the Try Server master for the given bot.

  It tries to guess the master from the bot name, but may still fail
  and return None. There is no longer a default master.
  """
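  # For example (illustrative): 'linux_gpu' maps to 'tryserver.chromium.gpu'
  # via the explicit table below, while an unlisted name such as
  # 'win_chromium_dbg' falls through to the 'win' substring heuristic and
  # yields 'tryserver.chromium.win'.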
  # Potentially ambiguous bot names are listed explicitly.
  master_map = {
    'linux_gpu': 'tryserver.chromium.gpu',
    'mac_gpu': 'tryserver.chromium.gpu',
    'win_gpu': 'tryserver.chromium.gpu',
    'chromium_presubmit': 'tryserver.chromium.linux',
    'blink_presubmit': 'tryserver.chromium.linux',
    'tools_build_presubmit': 'tryserver.chromium.linux',
  }
  master = master_map.get(bot)
  if not master:
    if 'gpu' in bot:
      master = 'tryserver.chromium.gpu'
    elif 'linux' in bot or 'android' in bot or 'presubmit' in bot:
      master = 'tryserver.chromium.linux'
    elif 'win' in bot:
      master = 'tryserver.chromium.win'
    elif 'mac' in bot or 'ios' in bot:
      master = 'tryserver.chromium.mac'
  return master

def GetDefaultTryConfigs(bots=None):
  """Returns a map of try server masters to {bot: set(tests)}, optionally
  filtered by [bots].

  To add tests to this list, they MUST be in the corresponding master's
  gatekeeper config. For example, anything on master.chromium would be closed by
  tools/build/masters/master.chromium/master_gatekeeper_cfg.py.

  If 'bots' is specified, will only return configurations for bots in that list.
  """
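  # Shape of the return value, e.g. (illustrative):
  #   GetDefaultTryConfigs(['linux_gpu']) ->
  #       {'tryserver.chromium.gpu': {'linux_gpu': set(['defaulttests'])}}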
  standard_tests = [
    'base_unittests',
    'browser_tests',
    'cacheinvalidation_unittests',
    'check_deps',
    'check_deps2git',
    'content_browsertests',
    'content_unittests',
    'crypto_unittests',
    'gpu_unittests',
    'interactive_ui_tests',
    'ipc_tests',
    'jingle_unittests',
    'media_unittests',
    'net_unittests',
    'ppapi_unittests',
    'printing_unittests',
    'sql_unittests',
    'sync_unit_tests',
    'unit_tests',
    # Broken in release.
    #'url_unittests',
    #'webkit_unit_tests',
  ]

  builders_and_tests = {
    # TODO(maruel): Figure out a way to run 'sizes' where people can
    # effectively update the perf expectation correctly. This requires a
    # clobber=True build running 'sizes'. 'sizes' is not accurate with
    # incremental build. Reference:
    # http://chromium.org/developers/tree-sheriffs/perf-sheriffs.
    # TODO(maruel): An option would be to run 'sizes' but not count a failure
    # of this step as a try job failure.
    'android_aosp': ['compile'],
    'android_arm64_dbg_recipe': ['slave_steps'],
    'android_chromium_gn_compile_dbg': ['compile'],
    'android_chromium_gn_compile_rel': ['compile'],
    'android_clang_dbg': ['slave_steps'],
    'android_clang_dbg_recipe': ['slave_steps'],
    'android_dbg_tests_recipe': ['slave_steps'],
    'cros_x86': ['defaulttests'],
    'ios_dbg_simulator': [
      'compile',
      'base_unittests',
      'content_unittests',
      'crypto_unittests',
      'url_unittests',
      'net_unittests',
      'sql_unittests',
      'ui_unittests',
    ],
    'ios_rel_device': ['compile'],
    'ios_rel_device_ninja': ['compile'],
    'linux_asan': ['compile'],
    'mac_asan': ['compile'],
    #TODO(stip): Change the name of this builder to reflect that it's release.
    'linux_gtk': standard_tests,
    'linux_chromeos_asan': ['compile'],
    'linux_chromium_chromeos_clang_dbg': ['defaulttests'],
    'linux_chromium_chromeos_rel_swarming': ['defaulttests'],
    'linux_chromium_compile_dbg': ['defaulttests'],
    'linux_chromium_gn_dbg': ['compile'],
    'linux_chromium_gn_rel': ['defaulttests'],
    'linux_chromium_rel_swarming': ['defaulttests'],
    'linux_chromium_clang_dbg': ['defaulttests'],
    'linux_gpu': ['defaulttests'],
    'linux_nacl_sdk_build': ['compile'],
    'mac_chromium_compile_dbg': ['defaulttests'],
    'mac_chromium_rel_swarming': ['defaulttests'],
    'mac_gpu': ['defaulttests'],
    'mac_nacl_sdk_build': ['compile'],
    'win_chromium_compile_dbg': ['defaulttests'],
    'win_chromium_dbg': ['defaulttests'],
    'win_chromium_rel_swarming': ['defaulttests'],
    'win_chromium_x64_rel_swarming': ['defaulttests'],
    'win_gpu': ['defaulttests'],
    'win_nacl_sdk_build': ['compile'],
    'win8_chromium_rel': ['defaulttests'],
  }

  if bots:
    filtered_builders_and_tests = dict((bot, set(builders_and_tests[bot]))
                                       for bot in bots)
  else:
    filtered_builders_and_tests = dict(
        (bot, set(tests))
        for bot, tests in builders_and_tests.iteritems())

  # Build up the mapping from tryserver master to bot/test.
  out = dict()
  for bot, tests in filtered_builders_and_tests.iteritems():
    out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
  return out

def CheckChangeOnCommit(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # TODO(thestig) temporarily disabled, doesn't work in third_party/
  #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
  #    input_api, output_api, sources))
  # Make sure the tree is 'open'.
  results.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api,
      output_api,
      json_url='http://chromium-status.appspot.com/current?format=json'))

  results.extend(input_api.canned_checks.CheckChangeHasBugField(
      input_api, output_api))
  results.extend(input_api.canned_checks.CheckChangeHasDescription(
      input_api, output_api))
  return results

def GetPreferredTryMasters(project, change):
  files = change.LocalPaths()

  if not files or all(re.search(r'[\\\/]OWNERS$', f) for f in files):
    return {}

  if all(re.search(r'\.(m|mm)$|(^|[\\\/_])mac[\\\/_.]', f) for f in files):
    return GetDefaultTryConfigs([
        'mac_chromium_compile_dbg',
        'mac_chromium_rel_swarming',
    ])
  if all(re.search('(^|[/_])win[/_.]', f) for f in files):
    return GetDefaultTryConfigs([
        'win_chromium_dbg',
        'win_chromium_rel_swarming',
        'win8_chromium_rel',
    ])
  if all(re.search(r'(^|[\\\/_])android[\\\/_.]', f) for f in files):
    return GetDefaultTryConfigs([
        'android_aosp',
        'android_clang_dbg',
        'android_dbg_tests_recipe',
    ])
  if all(re.search(r'[\\\/_]ios[\\\/_.]', f) for f in files):
    return GetDefaultTryConfigs(['ios_rel_device', 'ios_dbg_simulator'])

  builders = [
      'android_arm64_dbg_recipe',
      'android_chromium_gn_compile_rel',
      'android_chromium_gn_compile_dbg',
      'android_clang_dbg',
      'android_clang_dbg_recipe',
      'android_dbg_tests_recipe',
      'ios_dbg_simulator',
      'ios_rel_device',
      'ios_rel_device_ninja',
      'linux_chromium_chromeos_rel_swarming',
      'linux_chromium_clang_dbg',
      'linux_chromium_gn_dbg',
      'linux_chromium_gn_rel',
      'linux_chromium_rel_swarming',
      'linux_gpu',
      'mac_chromium_compile_dbg',
      'mac_chromium_rel_swarming',
      'mac_gpu',
      'win_chromium_compile_dbg',
      'win_chromium_rel_swarming',
      'win_chromium_x64_rel_swarming',
      'win_gpu',
      'win8_chromium_rel',
  ]

  # Match things like path/aura/file.cc and path/file_aura.cc.
  # Same for chromeos.
  if any(re.search(r'[\\\/_](aura|chromeos)', f) for f in files):
    builders.extend([
        'linux_chromeos_asan',
        'linux_chromium_chromeos_clang_dbg'
    ])

  # If there are gyp changes to base, build, or chromeos, run a full cros build
  # in addition to the shorter linux_chromeos build. Changes to high level gyp
  # files have a much higher chance of breaking the cros build, which is
  # different from the linux_chromeos build that most chrome developers test
  # with.
  if any(re.search('^(base|build|chromeos).*\.gypi?$', f) for f in files):
    builders.extend(['cros_x86'])

  # The AOSP bot doesn't build the chrome/ layer, so ignore any changes to it
  # unless they're .gyp(i) files as changes to those files can break the gyp
  # step on that bot.
  if (not all(re.search('^chrome', f) for f in files) or
      any(re.search('\.gypi?$', f) for f in files)):
    builders.extend(['android_aosp'])

  return GetDefaultTryConfigs(builders)