1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into gcl.
18 r
"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
19 r
"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
20 r
"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
21 r
"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
26 r
".+[\\\/]pnacl_shim\.c$",
27 r
"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
28 r
"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js"
31 # TestRunner and NetscapePlugIn library is temporarily excluded from pan-project
32 # checks until it's transitioned to chromium coding style.
34 r
"^content[\\\/]shell[\\\/]renderer[\\\/]test_runner[\\\/].*",
35 r
"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
38 # Fragment of a regular expression that matches C++ and Objective-C++
39 # implementation files.
40 _IMPLEMENTATION_EXTENSIONS
= r
'\.(cc|cpp|cxx|mm)$'
42 # Regular expression that matches code only used for test binaries
44 _TEST_CODE_EXCLUDED_PATHS
= (
45 r
'.*[/\\](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS
,
46 r
'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS
,
47 r
'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
48 _IMPLEMENTATION_EXTENSIONS
,
49 r
'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS
,
50 r
'.*[/\\](test|tool(s)?)[/\\].*',
51 # content_shell is used for running layout tests.
52 r
'content[/\\]shell[/\\].*',
53 # At request of folks maintaining this folder.
54 r
'chrome[/\\]browser[/\\]automation[/\\].*',
55 # Non-production example code.
56 r
'mojo[/\\]examples[/\\].*',
57 # Launcher for running iOS tests on the simulator.
58 r
'testing[/\\]iossim[/\\]iossim\.mm$',
61 _TEST_ONLY_WARNING
= (
62 'You might be calling functions intended only for testing from\n'
63 'production code. It is OK to ignore this warning if you know what\n'
64 'you are doing, as the heuristics used to detect the situation are\n'
65 'not perfect. The commit queue will not block on this warning.')
68 _INCLUDE_ORDER_WARNING
= (
69 'Your #include order seems to be broken. Send mail to\n'
70 'marja@chromium.org if this is not the case.')
73 _BANNED_OBJC_FUNCTIONS
= (
77 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
78 'prohibited. Please use CrTrackingArea instead.',
79 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
86 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
88 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
93 'convertPointFromBase:',
95 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
96 'Please use |convertPoint:(point) fromView:nil| instead.',
97 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
102 'convertPointToBase:',
104 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
105 'Please use |convertPoint:(point) toView:nil| instead.',
106 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
111 'convertRectFromBase:',
113 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
114 'Please use |convertRect:(point) fromView:nil| instead.',
115 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
120 'convertRectToBase:',
122 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
123 'Please use |convertRect:(point) toView:nil| instead.',
124 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
129 'convertSizeFromBase:',
131 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
132 'Please use |convertSize:(point) fromView:nil| instead.',
133 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
138 'convertSizeToBase:',
140 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
141 'Please use |convertSize:(point) toView:nil| instead.',
142 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
149 _BANNED_CPP_FUNCTIONS
= (
150 # Make sure that gtest's FRIEND_TEST() macro is not used; the
151 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
152 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
156 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
157 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
165 'New code should not use ScopedAllowIO. Post a task to the blocking',
166 'pool or the FILE thread instead.',
170 r
"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_loader\.cc$",
171 r
"^components[\\\/]breakpad[\\\/]app[\\\/]breakpad_mac\.mm$",
172 r
"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
173 r
"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
174 r
"^mojo[\\\/]system[\\\/]raw_shared_buffer_posix\.cc$",
175 r
"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
176 r
"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
182 'The use of SkRefPtr is prohibited. ',
183 'Please use skia::RefPtr instead.'
191 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
192 'Please use skia::RefPtr instead.'
200 'The use of SkAutoTUnref is dangerous because it implicitly ',
201 'converts to a raw pointer. Please use skia::RefPtr instead.'
209 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
210 'because it implicitly converts to a raw pointer. ',
211 'Please use skia::RefPtr instead.'
217 r
'/HANDLE_EINTR\(.*close',
219 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
220 'descriptor will be closed, and it is incorrect to retry the close.',
221 'Either call close directly and ignore its return value, or wrap close',
222 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
228 r
'/IGNORE_EINTR\((?!.*close)',
230 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
231 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
235 # Files that #define IGNORE_EINTR.
236 r
'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
237 r
'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
243 'Do not introduce new v8::Extensions into the code base, use',
244 'gin::Wrappable instead. See http://crbug.com/334679',
248 r
'extensions[/\\]renderer[/\\]safe_builtins\.*',
255 # Please keep sorted.
259 'OS_CAT', # For testing.
def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
  """Attempts to prevent use of functions intended only for testing in
  non-testing code. For now this is just a best-effort implementation
  that ignores header files and may have some false positives. A
  better implementation would probably need a proper C++ parser.
  """
  # We only scan .cc files and the like, as the declaration of
  # for-testing functions in header files are hard to distinguish from
  # calls to such functions without a proper C++ parser.
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS

  base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
  inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
  # Lines where the pattern appears only inside a // comment are ignored.
  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
  # Qualified calls (Foo::BarForTesting) and definitions ("...ForTesting ... {")
  # are not flagged: they are declarations/implementations, not production use.
  exclusion_pattern = input_api.re.compile(
      r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
          base_function_pattern, base_function_pattern))

  def FilterFile(affected_file):
    # Restrict the scan to implementation files outside test/tool directories.
    black_list = (_EXCLUDED_PATHS +
                  _TEST_CODE_EXCLUDED_PATHS +
                  input_api.DEFAULT_BLACK_LIST)
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(file_inclusion_pattern, ),
        black_list=black_list)

  problems = []
  for f in input_api.AffectedSourceFiles(FilterFile):
    local_path = f.LocalPath()
    for line_number, line in f.ChangedContents():
      if (inclusion_pattern.search(line) and
          not comment_pattern.search(line) and
          not exclusion_pattern.search(line)):
        problems.append(
            '%s:%d\n    %s' % (local_path, line_number, line.strip()))

  if problems:
    return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
  else:
    return []
317 def _CheckNoIOStreamInHeaders(input_api
, output_api
):
318 """Checks to make sure no .h files include <iostream>."""
320 pattern
= input_api
.re
.compile(r
'^#include\s*<iostream>',
321 input_api
.re
.MULTILINE
)
322 for f
in input_api
.AffectedSourceFiles(input_api
.FilterSourceFile
):
323 if not f
.LocalPath().endswith('.h'):
325 contents
= input_api
.ReadFile(f
)
326 if pattern
.search(contents
):
330 return [ output_api
.PresubmitError(
331 'Do not #include <iostream> in header files, since it inserts static '
332 'initialization into every file including the header. Instead, '
333 '#include <ostream>. See http://crbug.com/94794',
338 def _CheckNoUNIT_TESTInSourceFiles(input_api
, output_api
):
339 """Checks to make sure no source files use UNIT_TEST"""
341 for f
in input_api
.AffectedFiles():
342 if (not f
.LocalPath().endswith(('.cc', '.mm'))):
345 for line_num
, line
in f
.ChangedContents():
346 if 'UNIT_TEST ' in line
or line
.endswith('UNIT_TEST'):
347 problems
.append(' %s:%d' % (f
.LocalPath(), line_num
))
351 return [output_api
.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
352 '\n'.join(problems
))]
355 def _CheckNoNewWStrings(input_api
, output_api
):
356 """Checks to make sure we don't introduce use of wstrings."""
358 for f
in input_api
.AffectedFiles():
359 if (not f
.LocalPath().endswith(('.cc', '.h')) or
360 f
.LocalPath().endswith(('test.cc', '_win.cc', '_win.h'))):
364 for line_num
, line
in f
.ChangedContents():
365 if 'presubmit: allow wstring' in line
:
367 elif not allowWString
and 'wstring' in line
:
368 problems
.append(' %s:%d' % (f
.LocalPath(), line_num
))
375 return [output_api
.PresubmitPromptWarning('New code should not use wstrings.'
376 ' If you are calling a cross-platform API that accepts a wstring, '
378 '\n'.join(problems
))]
381 def _CheckNoDEPSGIT(input_api
, output_api
):
382 """Make sure .DEPS.git is never modified manually."""
383 if any(f
.LocalPath().endswith('.DEPS.git') for f
in
384 input_api
.AffectedFiles()):
385 return [output_api
.PresubmitError(
386 'Never commit changes to .DEPS.git. This file is maintained by an\n'
387 'automated system based on what\'s in DEPS and your changes will be\n'
389 'See http://code.google.com/p/chromium/wiki/UsingNewGit#Rolling_DEPS\n'
390 'for more information')]
def _CheckNoBannedFunctions(input_api, output_api):
  """Make sure that banned functions are not used."""
  warnings = []
  errors = []

  # Objective-C(++) and header files against _BANNED_OBJC_FUNCTIONS.
  file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
        matched = False
        # A leading '/' marks the matcher as a regex; otherwise substring.
        if func_name[0:1] == '/':
          regex = func_name[1:]
          if input_api.re.search(regex, line):
            matched = True
        elif func_name in line:
          matched = True
        if matched:
          problems = warnings
          if error:
            problems = errors
          problems.append('    %s:%d:' % (f.LocalPath(), line_num))
          for message_line in message:
            problems.append('      %s' % message_line)

  # C++, Objective-C++ and header files against _BANNED_CPP_FUNCTIONS.
  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
        def IsBlacklisted(affected_file, blacklist):
          local_path = affected_file.LocalPath()
          for item in blacklist:
            if input_api.re.match(item, local_path):
              return True
          return False
        if IsBlacklisted(f, excluded_paths):
          continue
        matched = False
        if func_name[0:1] == '/':
          regex = func_name[1:]
          if input_api.re.search(regex, line):
            matched = True
        elif func_name in line:
          matched = True
        if matched:
          problems = warnings
          if error:
            problems = errors
          problems.append('    %s:%d:' % (f.LocalPath(), line_num))
          for message_line in message:
            problems.append('      %s' % message_line)

  result = []
  if (warnings):
    result.append(output_api.PresubmitPromptWarning(
        'Banned functions were used.\n' + '\n'.join(warnings)))
  if (errors):
    result.append(output_api.PresubmitError(
        'Banned functions were used.\n' + '\n'.join(errors)))
  return result
455 def _CheckNoPragmaOnce(input_api
, output_api
):
456 """Make sure that banned functions are not used."""
458 pattern
= input_api
.re
.compile(r
'^#pragma\s+once',
459 input_api
.re
.MULTILINE
)
460 for f
in input_api
.AffectedSourceFiles(input_api
.FilterSourceFile
):
461 if not f
.LocalPath().endswith('.h'):
463 contents
= input_api
.ReadFile(f
)
464 if pattern
.search(contents
):
468 return [output_api
.PresubmitError(
469 'Do not use #pragma once in header files.\n'
470 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
475 def _CheckNoTrinaryTrueFalse(input_api
, output_api
):
476 """Checks to make sure we don't introduce use of foo ? true : false."""
478 pattern
= input_api
.re
.compile(r
'\?\s*(true|false)\s*:\s*(true|false)')
479 for f
in input_api
.AffectedFiles():
480 if not f
.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
483 for line_num
, line
in f
.ChangedContents():
484 if pattern
.match(line
):
485 problems
.append(' %s:%d' % (f
.LocalPath(), line_num
))
489 return [output_api
.PresubmitPromptWarning(
490 'Please consider avoiding the "? true : false" pattern if possible.\n' +
491 '\n'.join(problems
))]
def _CheckUnwantedDependencies(input_api, output_api):
  """Runs checkdeps on #include statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  original_sys_path = sys.path
  try:
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
    import checkdeps
    from cpp_checker import CppChecker
    from rules import Rule
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  added_includes = []
  for f in input_api.AffectedFiles():
    if not CppChecker.IsCppFile(f.LocalPath()):
      continue

    changed_lines = [line for line_num, line in f.ChangedContents()]
    added_includes.append([f.LocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  error_descriptions = []
  warning_descriptions = []
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
      added_includes):
    description_with_path = '%s\n    %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
    else:
      warning_descriptions.append(description_with_path)

  results = []
  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more #includes that violate checkdeps rules.',
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more #includes of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '#include? See relevant DEPS file(s) for details and contacts.',
        warning_descriptions))
  return results
547 def _CheckFilePermissions(input_api
, output_api
):
548 """Check that all files have their permissions properly set."""
549 if input_api
.platform
== 'win32':
551 args
= [sys
.executable
, 'tools/checkperms/checkperms.py', '--root',
552 input_api
.change
.RepositoryRoot()]
553 for f
in input_api
.AffectedFiles():
554 args
+= ['--file', f
.LocalPath()]
555 checkperms
= input_api
.subprocess
.Popen(args
,
556 stdout
=input_api
.subprocess
.PIPE
)
557 errors
= checkperms
.communicate()[0].strip()
559 return [output_api
.PresubmitError('checkperms.py failed.',
560 errors
.splitlines())]
564 def _CheckNoAuraWindowPropertyHInHeaders(input_api
, output_api
):
565 """Makes sure we don't include ui/aura/window_property.h
568 pattern
= input_api
.re
.compile(r
'^#include\s*"ui/aura/window_property.h"')
570 for f
in input_api
.AffectedFiles():
571 if not f
.LocalPath().endswith('.h'):
573 for line_num
, line
in f
.ChangedContents():
574 if pattern
.match(line
):
575 errors
.append(' %s:%d' % (f
.LocalPath(), line_num
))
579 results
.append(output_api
.PresubmitError(
580 'Header files should not include ui/aura/window_property.h', errors
))
584 def _CheckIncludeOrderForScope(scope
, input_api
, file_path
, changed_linenums
):
585 """Checks that the lines in scope occur in the right order.
587 1. C system files in alphabetical order
588 2. C++ system files in alphabetical order
589 3. Project's .h files
592 c_system_include_pattern
= input_api
.re
.compile(r
'\s*#include <.*\.h>')
593 cpp_system_include_pattern
= input_api
.re
.compile(r
'\s*#include <.*>')
594 custom_include_pattern
= input_api
.re
.compile(r
'\s*#include ".*')
596 C_SYSTEM_INCLUDES
, CPP_SYSTEM_INCLUDES
, CUSTOM_INCLUDES
= range(3)
598 state
= C_SYSTEM_INCLUDES
601 previous_line_num
= 0
602 problem_linenums
= []
603 for line_num
, line
in scope
:
604 if c_system_include_pattern
.match(line
):
605 if state
!= C_SYSTEM_INCLUDES
:
606 problem_linenums
.append((line_num
, previous_line_num
))
607 elif previous_line
and previous_line
> line
:
608 problem_linenums
.append((line_num
, previous_line_num
))
609 elif cpp_system_include_pattern
.match(line
):
610 if state
== C_SYSTEM_INCLUDES
:
611 state
= CPP_SYSTEM_INCLUDES
612 elif state
== CUSTOM_INCLUDES
:
613 problem_linenums
.append((line_num
, previous_line_num
))
614 elif previous_line
and previous_line
> line
:
615 problem_linenums
.append((line_num
, previous_line_num
))
616 elif custom_include_pattern
.match(line
):
617 if state
!= CUSTOM_INCLUDES
:
618 state
= CUSTOM_INCLUDES
619 elif previous_line
and previous_line
> line
:
620 problem_linenums
.append((line_num
, previous_line_num
))
622 problem_linenums
.append(line_num
)
624 previous_line_num
= line_num
627 for (line_num
, previous_line_num
) in problem_linenums
:
628 if line_num
in changed_linenums
or previous_line_num
in changed_linenums
:
629 warnings
.append(' %s:%d' % (file_path
, line_num
))
def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
  """Checks the #include order for the given file f."""

  system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
  # Exclude the following includes from the check:
  # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
  #    specific order.
  # 2) <atlbase.h>, "build/build_config.h"
  excluded_include_pattern = input_api.re.compile(
      r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
  custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
  # Match the final or penultimate token if it is xxxtest so we can ignore it
  # when considering the special first include.
  test_file_tag_pattern = input_api.re.compile(
      r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
  if_pattern = input_api.re.compile(
      r'\s*#\s*(if|elif|else|endif|define|undef).*')
  # Some files need specialized order of includes; exclude such files from this
  # check.
  uncheckable_includes_pattern = input_api.re.compile(
      r'\s*#include '
      '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')

  contents = f.NewContents()
  warnings = []
  line_num = 0

  # Handle the special first include. If the first include file is
  # some/path/file.h, the corresponding including file can be some/path/file.cc,
  # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
  # etc. It's also possible that no special first include exists.
  # If the included file is some/path/file_platform.h the including file could
  # also be some/path/file_xxxtest_platform.h.
  including_file_base_name = test_file_tag_pattern.sub(
      '', input_api.os_path.basename(f.LocalPath()))

  for line in contents:
    line_num += 1
    if system_include_pattern.match(line):
      # No special first include -> process the line again along with normal
      # includes.
      line_num -= 1
      break
    match = custom_include_pattern.match(line)
    if match:
      match_dict = match.groupdict()
      header_basename = test_file_tag_pattern.sub(
          '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')

      if header_basename not in including_file_base_name:
        # No special first include -> process the line again along with normal
        # includes.
        line_num -= 1
      break

  # Split into scopes: Each region between #if and #endif is its own scope.
  scopes = []
  current_scope = []
  for line in contents[line_num:]:
    line_num += 1
    if uncheckable_includes_pattern.match(line):
      continue
    if if_pattern.match(line):
      scopes.append(current_scope)
      current_scope = []
    elif ((system_include_pattern.match(line) or
           custom_include_pattern.match(line)) and
          not excluded_include_pattern.match(line)):
      current_scope.append((line_num, line))
  scopes.append(current_scope)

  for scope in scopes:
    warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
                                               changed_linenums))
  return warnings
def _CheckIncludeOrder(input_api, output_api):
  """Checks that the #include order is correct.

  1. The corresponding header for source files.
  2. C system files in alphabetical order
  3. C++ system files in alphabetical order
  4. Project's .h files in alphabetical order

  Each region separated by #if, #elif, #else, #endif, #define and #undef follows
  these rules separately.
  """
  warnings = []
  for f in input_api.AffectedFiles():
    if f.LocalPath().endswith(('.cc', '.h')):
      changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
      warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))

  results = []
  if warnings:
    results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
                                                      warnings))
  return results
735 def _CheckForVersionControlConflictsInFile(input_api
, f
):
736 pattern
= input_api
.re
.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
738 for line_num
, line
in f
.ChangedContents():
739 if pattern
.match(line
):
740 errors
.append(' %s:%d %s' % (f
.LocalPath(), line_num
, line
))
def _CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  errors = []
  for f in input_api.AffectedFiles():
    errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'Version control conflict markers found, please resolve.', errors))
  return results
def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
  """Warns when layers below src/chrome/ hardcode google.com URLs."""
  def FilterFile(affected_file):
    """Filter function for use with input_api.AffectedSourceFiles,
    below.  This filters out everything except non-test files from
    top-level directories that generally speaking should not hard-code
    service URLs (e.g. src/android_webview/, src/content/ and others).
    """
    return input_api.FilterSourceFile(
        affected_file,
        white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
        black_list=(_EXCLUDED_PATHS +
                    _TEST_CODE_EXCLUDED_PATHS +
                    input_api.DEFAULT_BLACK_LIST))

  base_pattern = '"[^"]*google\.com[^"]*"'
  # Occurrences inside // comments are intentionally ignored.
  comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
  pattern = input_api.re.compile(base_pattern)
  problems = []  # items are (filename, line_number, line)
  for f in input_api.AffectedSourceFiles(FilterFile):
    for line_num, line in f.ChangedContents():
      if not comment_pattern.search(line) and pattern.search(line):
        problems.append((f.LocalPath(), line_num, line))

  if problems:
    return [output_api.PresubmitPromptOrNotify(
        'Most layers below src/chrome/ should not hardcode service URLs.\n'
        'Are you sure this is correct?',
        ['  %s:%d:  %s' % (
            problem[0], problem[1], problem[2]) for problem in problems])]
  else:
    return []
790 def _CheckNoAbbreviationInPngFileName(input_api
, output_api
):
791 """Makes sure there are no abbreviations in the name of PNG files.
793 pattern
= input_api
.re
.compile(r
'.*_[a-z]_.*\.png$|.*_[a-z]\.png$')
795 for f
in input_api
.AffectedFiles(include_deletes
=False):
796 if pattern
.match(f
.LocalPath()):
797 errors
.append(' %s' % f
.LocalPath())
801 results
.append(output_api
.PresubmitError(
802 'The name of PNG files should not have abbreviations. \n'
803 'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
804 'Contact oshima@chromium.org if you have questions.', errors
))
808 def _FilesToCheckForIncomingDeps(re
, changed_lines
):
809 """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
810 a set of DEPS entries that we should look up.
812 For a directory (rather than a specific filename) we fake a path to
813 a specific filename by adding /DEPS. This is chosen as a file that
814 will seldom or never be subject to per-file include_rules.
816 # We ignore deps entries on auto-generated directories.
817 AUTO_GENERATED_DIRS
= ['grit', 'jni']
819 # This pattern grabs the path without basename in the first
820 # parentheses, and the basename (if present) in the second. It
821 # relies on the simple heuristic that if there is a basename it will
822 # be a header file ending in ".h".
823 pattern
= re
.compile(
824 r
"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
826 for changed_line
in changed_lines
:
827 m
= pattern
.match(changed_line
)
830 if path
.split('/')[0] not in AUTO_GENERATED_DIRS
:
832 results
.add('%s%s' % (path
, m
.group(2)))
834 results
.add('%s/DEPS' % path
)
def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
  want to make sure that the change is reviewed by an OWNER of the
  target file or directory, to avoid layering violations from being
  introduced. This check verifies that this happens.
  """
  changed_lines = set()
  for f in input_api.AffectedFiles():
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      changed_lines |= set(line.strip()
                           for line_num, line
                           in f.ChangedContents())
  if not changed_lines:
    return []

  virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
                                                           changed_lines)
  if not virtual_depended_on_files:
    return []

  if input_api.is_committing:
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check for DEPS additions')]
    if not input_api.change.issue:
      return [output_api.PresubmitError(
          "DEPS approval by OWNERS check failed: this change has "
          "no Rietveld issue number, so we can't check it for approvals.")]
    output = output_api.PresubmitError
  else:
    output = output_api.PresubmitNotifyResult

  owners_db = input_api.owners_db
  owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
      input_api,
      owners_db.email_regexp,
      approval_needed=input_api.is_committing)

  # For non-committing (upload) runs there may be no approver yet; fall back
  # to the change author.
  owner_email = owner_email or input_api.change.author_email

  reviewers_plus_owner = set(reviewers)
  if owner_email:
    reviewers_plus_owner.add(owner_email)
  missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
                                                 reviewers_plus_owner)

  # We strip the /DEPS part that was added by
  # _FilesToCheckForIncomingDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    else:
      return path
  unapproved_dependencies = ["'+%s'," % StripDeps(path)
                             for path in missing_files]

  if unapproved_dependencies:
    output_list = [
      output('Missing LGTM from OWNERS of dependencies added to DEPS:\n    %s' %
             '\n    '.join(sorted(unapproved_dependencies)))]
    if not input_api.is_committing:
      suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
      output_list.append(output(
          'Suggested missing target path OWNERS:\n    %s' %
          '\n    '.join(suggested_owners or [])))
    return output_list

  return []
def _CheckSpamLogging(input_api, output_api):
  """Flags new LOG(INFO)/printf spam in production source files."""
  file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^base[\\\/]logging\.h$",
                 r"^base[\\\/]logging\.cc$",
                 r"^cloud_print[\\\/]",
                 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
                 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
                 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
                 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
                     r"startup_browser_creator\.cc$",
                 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
                 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
                 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
                     r"gl_helper_benchmark\.cc$",
                 r"^native_client_sdk[\\\/]",
                 r"^remoting[\\\/]base[\\\/]logging\.h$",
                 r"^remoting[\\\/]host[\\\/].*",
                 r"^sandbox[\\\/]linux[\\\/].*",
                 # NOTE(review): one black_list entry was lost in extraction
                 # here (upstream has r"^tools[\\\/]") — verify against the
                 # canonical file.
                 r"^tools[\\\/]",
                 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",))
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, white_list=(file_inclusion_pattern,), black_list=black_list)

  log_info = []
  printf = []

  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    if re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
      log_info.append(f.LocalPath())
    elif re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
      log_info.append(f.LocalPath())

    if re.search(r"\bprintf\(", contents):
      printf.append(f.LocalPath())
    elif re.search(r"\bfprintf\((stdout|stderr)", contents):
      printf.append(f.LocalPath())

  if log_info:
    return [output_api.PresubmitError(
        'These files spam the console log with LOG(INFO):',
        items=log_info)]

  if printf:
    return [output_api.PresubmitError(
        'These files spam the console log with printf/fprintf:',
        items=printf)]
  return []
963 def _CheckForAnonymousVariables(input_api
, output_api
):
964 """These types are all expected to hold locks while in scope and
965 so should never be anonymous (which causes them to be immediately
967 they_who_must_be_named
= [
971 'SkAutoAlphaRestore',
972 'SkAutoBitmapShaderInstall',
973 'SkAutoBlitterChoose',
974 'SkAutoBounderCommit',
976 'SkAutoCanvasRestore',
977 'SkAutoCommentBlock',
979 'SkAutoDisableDirectionCheck',
980 'SkAutoDisableOvalCheck',
987 'SkAutoMaskFreeImage',
988 'SkAutoMutexAcquire',
989 'SkAutoPathBoundsUpdate',
991 'SkAutoRasterClipValidate',
997 anonymous
= r
'(%s)\s*[({]' % '|'.join(they_who_must_be_named
)
998 # bad: base::AutoLock(lock.get());
999 # not bad: base::AutoLock lock(lock.get());
1000 bad_pattern
= input_api
.re
.compile(anonymous
)
1001 # good: new base::AutoLock(lock.get())
1002 good_pattern
= input_api
.re
.compile(r
'\bnew\s*' + anonymous
)
1005 for f
in input_api
.AffectedFiles():
1006 if not f
.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
1008 for linenum
, line
in f
.ChangedContents():
1009 if bad_pattern
.search(line
) and not good_pattern
.search(line
):
1010 errors
.append('%s:%d' % (f
.LocalPath(), linenum
))
1013 return [output_api
.PresubmitError(
1014 'These lines create anonymous variables that need to be named:',
1019 def _CheckCygwinShell(input_api
, output_api
):
1020 source_file_filter
= lambda x
: input_api
.FilterSourceFile(
1021 x
, white_list
=(r
'.+\.(gyp|gypi)$',))
1024 for f
in input_api
.AffectedSourceFiles(source_file_filter
):
1025 for linenum
, line
in f
.ChangedContents():
1026 if 'msvs_cygwin_shell' in line
:
1027 cygwin_shell
.append(f
.LocalPath())
1031 return [output_api
.PresubmitError(
1032 'These files should not use msvs_cygwin_shell (the default is 0):',
1033 items
=cygwin_shell
)]
def _CheckUserActionUpdate(input_api, output_api):
  """Checks if any new user action has been added."""
  if any('actions.xml' == input_api.os_path.basename(f) for f in
         input_api.LocalPaths()):
    # If actions.xml is already included in the changelist, the PRESUBMIT
    # for actions.xml will do a more complete presubmit check.
    return []

  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
  action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
  current_actions = None
  for f in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in f.ChangedContents():
      match = input_api.re.search(action_re, line)
      if match:
        # Loads contents in tools/metrics/actions/actions.xml to memory. It's
        # loaded only once per presubmit run.
        # NOTE(review): relative path assumes the presubmit runs from the
        # source root — confirm before moving this code.
        if not current_actions:
          with open('tools/metrics/actions/actions.xml') as actions_f:
            current_actions = actions_f.read()
        # Search for the matched user action name in |current_actions|.
        for action_name in match.groups():
          action = 'name="{0}"'.format(action_name)
          if action not in current_actions:
            return [output_api.PresubmitPromptWarning(
                'File %s line %d: %s is missing in '
                'tools/metrics/actions/actions.xml. Please run '
                'tools/metrics/actions/extract_actions.py to update.'
                % (f.LocalPath(), line_num, action_name))]
  return []
def _GetJSONParseError(input_api, filename, eat_comments=True):
  """Returns the ValueError raised while parsing |filename| as JSON, or None.

  When |eat_comments| is True the file contents are first piped through
  tools/json_comment_eater to strip non-standard // comments that most
  Chromium JSON files carry.
  """
  try:
    contents = input_api.ReadFile(filename)
    if eat_comments:
      json_comment_eater = input_api.os_path.join(
          input_api.PresubmitLocalPath(),
          'tools', 'json_comment_eater', 'json_comment_eater.py')
      process = input_api.subprocess.Popen(
          [input_api.python_executable, json_comment_eater],
          stdin=input_api.subprocess.PIPE,
          stdout=input_api.subprocess.PIPE,
          universal_newlines=True)
      (contents, _) = process.communicate(input=contents)

    input_api.json.loads(contents)
  except ValueError as e:
    return e
  return None
def _GetIDLParseError(input_api, filename):
  """Returns the IDL schema compiler's stderr for |filename|, or None.

  A non-empty stderr from tools/json_schema_compiler/idl_schema.py means the
  file failed to parse; clean output maps to None.
  """
  try:
    contents = input_api.ReadFile(filename)
    idl_schema = input_api.os_path.join(
        input_api.PresubmitLocalPath(),
        'tools', 'json_schema_compiler', 'idl_schema.py')
    process = input_api.subprocess.Popen(
        [input_api.python_executable, idl_schema],
        stdin=input_api.subprocess.PIPE,
        stdout=input_api.subprocess.PIPE,
        stderr=input_api.subprocess.PIPE,
        universal_newlines=True)
    (_, error) = process.communicate(input=contents)
    return error or None
  except ValueError as e:
    return e
def _CheckParseErrors(input_api, output_api):
  """Check that IDL and JSON files do not contain syntax errors."""
  actions = {
    '.idl': _GetIDLParseError,
    '.json': _GetJSONParseError,
  }
  # These paths contain test data and other known invalid JSON files.
  # NOTE(review): the original list may have contained additional test-data
  # patterns that are elided here — verify against version history.
  excluded_patterns = [
    '^components/policy/resources/policy_templates.json$',
  ]
  # Most JSON files are preprocessed and support comments, but these do not.
  # NOTE(review): entries elided in the reviewed copy — verify completeness.
  json_no_comments_patterns = [
    '^testing/',
  ]
  # Only run IDL checker on files in these directories.
  idl_included_patterns = [
    '^chrome/common/extensions/api/',
    '^extensions/common/api/',
  ]

  def get_action(affected_file):
    # Maps the file extension to its parse checker, or None if unchecked.
    filename = affected_file.LocalPath()
    return actions.get(input_api.os_path.splitext(filename)[1])

  def MatchesFile(patterns, path):
    # True when |path| matches any regex in |patterns|.
    for pattern in patterns:
      if input_api.re.search(pattern, path):
        return True
    return False

  def FilterFile(affected_file):
    # Keep only files that have a checker and are not explicitly excluded.
    action = get_action(affected_file)
    if not action:
      return False
    path = affected_file.LocalPath()

    if MatchesFile(excluded_patterns, path):
      return False

    if (action == _GetIDLParseError and
        not MatchesFile(idl_included_patterns, path)):
      return False
    return True

  results = []
  for affected_file in input_api.AffectedFiles(
      file_filter=FilterFile, include_deletes=False):
    action = get_action(affected_file)
    kwargs = {}
    if (action == _GetJSONParseError and
        MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
      kwargs['eat_comments'] = False
    parse_error = action(input_api,
                         affected_file.AbsoluteLocalPath(),
                         **kwargs)
    if parse_error:
      results.append(output_api.PresubmitError('%s could not be parsed: %s' %
          (affected_file.LocalPath(), parse_error)))
  return results
def _CheckJavaStyle(input_api, output_api):
  """Runs checkstyle on changed java files and returns errors if any exist."""
  original_sys_path = sys.path
  try:
    # Make the checkstyle wrapper importable from tools/android/checkstyle.
    sys.path = sys.path + [input_api.os_path.join(
        input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
    import checkstyle
  finally:
    # Restore sys.path to what it was before.
    sys.path = original_sys_path

  return checkstyle.RunCheckstyle(
      input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml')
1186 ( "-webkit-box", "flex" ),
1187 ( "-webkit-inline-box", "inline-flex" ),
1188 ( "-webkit-flex", "flex" ),
1189 ( "-webkit-inline-flex", "inline-flex" ),
1190 ( "-webkit-min-content", "min-content" ),
1191 ( "-webkit-max-content", "max-content" ),
1194 ( "-webkit-background-clip", "background-clip" ),
1195 ( "-webkit-background-origin", "background-origin" ),
1196 ( "-webkit-background-size", "background-size" ),
1197 ( "-webkit-box-shadow", "box-shadow" ),
1200 ( "-webkit-gradient", "gradient" ),
1201 ( "-webkit-repeating-gradient", "repeating-gradient" ),
1202 ( "-webkit-linear-gradient", "linear-gradient" ),
1203 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
1204 ( "-webkit-radial-gradient", "radial-gradient" ),
1205 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
def _CheckNoDeprecatedCSS(input_api, output_api):
  """ Make sure that we don't use deprecated CSS
      properties, functions or values. Our external
      documentation is ignored by the hooks as it
      needs to be consumed by WebKit. """
  results = []
  file_inclusion_pattern = (r".+\.css$")
  # NOTE(review): one black_list entry was elided in the reviewed copy;
  # verify this tuple against version history.
  black_list = (_EXCLUDED_PATHS +
                _TEST_CODE_EXCLUDED_PATHS +
                input_api.DEFAULT_BLACK_LIST +
                (r"^chrome/common/extensions/docs",
                 r"^chrome/docs",
                 r"^native_client_sdk"))
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=black_list)
  for fpath in input_api.AffectedFiles(file_filter=file_filter):
    for line_num, line in fpath.ChangedContents():
      for (deprecated_value, value) in _DEPRECATED_CSS:
        if input_api.re.search(deprecated_value, line):
          results.append(output_api.PresubmitError(
              "%s:%d: Use of deprecated CSS %s, use %s instead" %
              (fpath.LocalPath(), line_num, deprecated_value, value)))
  return results
1233 def _StripCommentsAndStrings(input_api
, s
):
1234 """Remove comments, replace string literals by a single token. Requires that
1235 input data is formatted with unix-style line ends."""
1237 s
= input_api
.re
.sub(r
'\\\n', r
'', s
) # Continue lines ending in backslash.
1245 mo
= input_api
.re
.match(r
'//.*', s
[i
:])
1247 i
+= len(mo
.group(0))
1249 mo
= input_api
.re
.match(r
'/\*.*?\*/', s
[i
:], input_api
.re
.DOTALL
)
1251 i
+= len(mo
.group(0))
1255 mo
= input_api
.re
.match(r
"'((\\\\)|(\\')|[^']+?)'", s
[i
:])
1257 raise Exception('bad char: ' + s
[i
:])
1258 i
+= len(mo
.group(0))
1259 out
+= ' CHAR_LITERAL '
1263 mo
= input_api
.re
.match(r
'".*?(?<!\\)(\\\\)*"', s
[i
:])
1265 raise Exception('bad string: ' + s
[i
:])
1266 i
+= len(mo
.group(0))
1267 out
+= ' STRING_LITERAL '
def _CheckContradictoryNotreachedUse(input_api, output_api):
  """Flags NOTREACHED() statements that are followed by live code.

  Runs the per-file check over all affected C/C++/Objective-C sources and
  wraps each finding in a PresubmitPromptWarning.
  """
  file_inclusion_pattern = (
      r".+\.c$", r".+\.cc$", r".+\.cpp$", r".+\.h$", r".+\.hpp$", r".+\.inl$",
      r".+\.m$", r".+\.mm$" )
  black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
  file_filter = lambda f: input_api.FilterSourceFile(
      f, white_list=file_inclusion_pattern, black_list=black_list)
  results = []
  for fpath in input_api.AffectedFiles(file_filter=file_filter):
    results.extend(_CheckContradictoryNotreachedUseInFile(input_api, fpath))
  return [output_api.PresubmitPromptWarning(r) for r in results]
def _CheckContradictoryNotreachedUseInFile(input_api, f):
  """Returns warning strings for NOTREACHED() calls followed by real code.

  NOTREACHED() asserts a code path is never taken, so statements after it
  (before the enclosing block closes or a |break|) contradict it. Comments
  and string literals are stripped first to avoid false positives.

  Args:
    input_api: presubmit InputApi (used for its |re| module).
    f: an AffectedFile whose new contents are scanned.
  Returns:
    A list of human-readable warning strings (possibly empty).
  """
  style_url = (
      'http://chromium.org/developers/coding-style'
      '#TOC-CHECK-DCHECK-and-NOTREACHED-')
  # Fix: the original called f.NewContents() twice and left the first result
  # in an unused local |contents|; call it once.
  text = ''.join(line + '\n' for line in f.NewContents())
  text = _StripCommentsAndStrings(input_api, text)
  errors = []
  while True:
    # Capture text between NOTREACHED(); and the next closing brace or "break".
    mo = input_api.re.search(
        r'[ \t]*NOTREACHED\(\s*\).*?;(?P<between>.*?)((\bbreak\b)|})',
        text, input_api.re.DOTALL)
    # TODO(tnagel): Catch loops inside which NOTREACHED() is followed by break.
    if not mo:
      break
    text = text[mo.end():]
    if input_api.re.match(r'[\s;]*$', mo.group('between'), input_api.re.DOTALL):
      # Only whitespace/semicolons before the block ends: this use is fine.
      continue
    excerpt = mo.group(0).rstrip()
    if len(excerpt) > 100:
      excerpt = excerpt[:100] + ' \u2026'  # ellipsis
    errors.append(
        '%s: NOTREACHED() may only be used at end-of-block '
        'but is followed by code.\n%s\n'
        'Offending section (comments/strings possibly stripped):\n%s'
        % (f, style_url, excerpt))
  return errors
def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  results = []
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api,
      excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
  results.extend(_CheckAuthorizedAuthor(input_api, output_api))
  results.extend(
      _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
  results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
  results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
  results.extend(_CheckNoNewWStrings(input_api, output_api))
  results.extend(_CheckNoDEPSGIT(input_api, output_api))
  results.extend(_CheckNoBannedFunctions(input_api, output_api))
  results.extend(_CheckNoPragmaOnce(input_api, output_api))
  results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
  results.extend(_CheckUnwantedDependencies(input_api, output_api))
  results.extend(_CheckFilePermissions(input_api, output_api))
  results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
  results.extend(_CheckIncludeOrder(input_api, output_api))
  results.extend(_CheckForVersionControlConflicts(input_api, output_api))
  results.extend(_CheckPatchFiles(input_api, output_api))
  results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
  results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
  results.extend(_CheckForInvalidOSMacros(input_api, output_api))
  results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
  results.extend(
      input_api.canned_checks.CheckChangeHasNoTabs(
          input_api,
          output_api,
          source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
  results.extend(_CheckSpamLogging(input_api, output_api))
  results.extend(_CheckForAnonymousVariables(input_api, output_api))
  results.extend(_CheckCygwinShell(input_api, output_api))
  results.extend(_CheckUserActionUpdate(input_api, output_api))
  results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
  results.extend(_CheckParseErrors(input_api, output_api))
  results.extend(_CheckContradictoryNotreachedUse(input_api, output_api))

  # Changing this file itself re-runs its unit tests as part of the check.
  if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
    results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
        input_api, output_api,
        input_api.PresubmitLocalPath(),
        whitelist=[r'^PRESUBMIT_test\.py$']))
  return results
def _CheckSubversionConfig(input_api, output_api):
  """Verifies the subversion config file is correctly setup.

  Checks that autoprops are enabled, returns an error otherwise.
  """
  join = input_api.os_path.join
  if input_api.platform == 'win32':
    appdata = input_api.environ.get('APPDATA', '')
    if not appdata:
      return [output_api.PresubmitError('%APPDATA% is not configured.')]
    path = join(appdata, 'Subversion', 'config')
  else:
    home = input_api.environ.get('HOME', '')
    if not home:
      return [output_api.PresubmitError('$HOME is not configured.')]
    path = join(home, '.subversion', 'config')

  error_msg = (
      'Please look at http://dev.chromium.org/developers/coding-style to\n'
      'configure your subversion configuration file. This enables automatic\n'
      'properties to simplify the project maintenance.\n'
      'Pro-tip: just download and install\n'
      'http://src.chromium.org/viewvc/chrome/trunk/tools/build/slave/config\n')

  try:
    # Fix: the original used a bare open() whose handle was never closed;
    # use a context manager so the file is released deterministically.
    with open(path, 'r') as config_file:
      lines = config_file.read().splitlines()
    # Make sure auto-props is enabled and check for 2 Chromium standard
    # auto-props.
    if (not '*.cc = svn:eol-style=LF' in lines or
        not '*.pdf = svn:mime-type=application/pdf' in lines or
        not 'enable-auto-props = yes' in lines):
      return [
          output_api.PresubmitNotifyResult(
              'It looks like you have not configured your subversion config '
              'file or it is not up-to-date.\n' + error_msg)]
  except (OSError, IOError):
    return [
        output_api.PresubmitNotifyResult(
            'Can\'t find your subversion config file.\n' + error_msg)]
  return []
def _CheckAuthorizedAuthor(input_api, output_api):
  """For non-googler/chromites committers, verify the author's email address is
  in AUTHORS.
  """
  # TODO(maruel): Add it to input_api?
  import fnmatch

  author = input_api.change.author_email
  if not author:
    input_api.logging.info('No author, skipping AUTHOR check')
    return []
  authors_path = input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'AUTHORS')
  # Fix: the original iterated over an un-closed file object; use a context
  # manager so the handle is released deterministically.
  with open(authors_path) as authors_file:
    valid_authors = [
        input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
        for line in authors_file]
  valid_authors = [item.group(1).lower() for item in valid_authors if item]
  # Entries may be fnmatch wildcards (e.g. *@chromium.org).
  if not any(fnmatch.fnmatch(author.lower(), valid)
             for valid in valid_authors):
    input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
    return [output_api.PresubmitPromptWarning(
        ('%s is not in AUTHORS file. If you are a new contributor, please visit'
        '\n'
        'http://www.chromium.org/developers/contributing-code and read the '
        '"Legal" section\n'
        'If you are a chromite, verify the contributor signed the CLA.') %
        author)]
  return []
1440 def _CheckPatchFiles(input_api
, output_api
):
1441 problems
= [f
.LocalPath() for f
in input_api
.AffectedFiles()
1442 if f
.LocalPath().endswith(('.orig', '.rej'))]
1444 return [output_api
.PresubmitError(
1445 "Don't commit .rej and .orig files.", problems
)]
1450 def _DidYouMeanOSMacro(bad_macro
):
1452 return {'A': 'OS_ANDROID',
1462 'W': 'OS_WIN'}[bad_macro
[3].upper()]
def _CheckForInvalidOSMacrosInFile(input_api, f):
  """Check for sensible looking, totally invalid OS macros."""
  preprocessor_statement = input_api.re.compile(r'^\s*#')
  os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
  results = []
  for lnum, line in f.ChangedContents():
    # Only inspect preprocessor lines; defined(OS_*) elsewhere is not a macro
    # test.
    if preprocessor_statement.search(line):
      for match in os_macro.finditer(line):
        if not match.group(1) in _VALID_OS_MACROS:
          good = _DidYouMeanOSMacro(match.group(1))
          did_you_mean = ' (did you mean %s?)' % good if good else ''
          results.append('    %s:%d %s%s' % (f.LocalPath(),
                                             lnum,
                                             match.group(1),
                                             did_you_mean))
  return results
def _CheckForInvalidOSMacros(input_api, output_api):
  """Check all affected files for invalid OS macros."""
  bad_macros = []
  for f in input_api.AffectedFiles():
    # Skip file types where OS_* tokens are not C preprocessor macros.
    if not f.LocalPath().endswith(('.py', '.js', '.html', '.css')):
      bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))

  if not bad_macros:
    return []

  return [output_api.PresubmitError(
      'Possibly invalid OS macro[s] found. Please fix your code\n'
      'or add your macro to src/PRESUBMIT.py.', bad_macros)]
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit entry point for `git cl upload`."""
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  results.extend(_CheckJavaStyle(input_api, output_api))
  return results
def GetTryServerMasterForBot(bot):
  """Returns the Try Server master for the given bot.

  Assumes that most Try Servers are on the tryserver.chromium master."""
  non_default_master_map = {
      'linux_gpu': 'tryserver.chromium.gpu',
      'mac_gpu': 'tryserver.chromium.gpu',
      'win_gpu': 'tryserver.chromium.gpu',
  }
  return non_default_master_map.get(bot, 'tryserver.chromium')
1519 def GetDefaultTryConfigs(bots
=None):
1520 """Returns a list of ('bot', set(['tests']), optionally filtered by [bots].
1522 To add tests to this list, they MUST be in the the corresponding master's
1523 gatekeeper config. For example, anything on master.chromium would be closed by
1524 tools/build/masters/master.chromium/master_gatekeeper_cfg.py.
1526 If 'bots' is specified, will only return configurations for bots in that list.
1532 'cacheinvalidation_unittests',
1535 'content_browsertests',
1536 'content_unittests',
1539 'interactive_ui_tests',
1545 'printing_unittests',
1549 # Broken in release.
1551 #'webkit_unit_tests',
1554 builders_and_tests
= {
1555 # TODO(maruel): Figure out a way to run 'sizes' where people can
1556 # effectively update the perf expectation correctly. This requires a
1557 # clobber=True build running 'sizes'. 'sizes' is not accurate with
1558 # incremental build. Reference:
1559 # http://chromium.org/developers/tree-sheriffs/perf-sheriffs.
1560 # TODO(maruel): An option would be to run 'sizes' but not count a failure
1561 # of this step as a try job failure.
1562 'android_aosp': ['compile'],
1563 'android_chromium_gn_compile_rel': ['compile'],
1564 'android_clang_dbg': ['slave_steps'],
1565 'android_dbg': ['slave_steps'],
1566 'cros_x86': ['defaulttests'],
1567 'ios_dbg_simulator': [
1570 'content_unittests',
1577 'ios_rel_device': ['compile'],
1578 'linux_asan': ['compile'],
1579 'mac_asan': ['compile'],
1580 #TODO(stip): Change the name of this builder to reflect that it's release.
1581 'linux_gtk': standard_tests
,
1582 'linux_chromeos_asan': ['compile'],
1583 'linux_chromium_chromeos_clang_dbg': ['defaulttests'],
1584 'linux_chromium_chromeos_rel': ['defaulttests'],
1585 'linux_chromium_compile_dbg': ['defaulttests'],
1586 'linux_chromium_gn_rel': ['defaulttests'],
1587 'linux_chromium_rel': ['defaulttests'],
1588 'linux_chromium_clang_dbg': ['defaulttests'],
1589 'linux_gpu': ['defaulttests'],
1590 'linux_nacl_sdk_build': ['compile'],
1591 'mac_chromium_compile_dbg': ['defaulttests'],
1592 'mac_chromium_rel': ['defaulttests'],
1593 'mac_gpu': ['defaulttests'],
1594 'mac_nacl_sdk_build': ['compile'],
1595 'win_chromium_compile_dbg': ['defaulttests'],
1596 'win_chromium_dbg': ['defaulttests'],
1597 'win_chromium_rel': ['defaulttests'],
1598 'win_chromium_x64_rel': ['defaulttests'],
1599 'win_gpu': ['defaulttests'],
1600 'win_nacl_sdk_build': ['compile'],
1604 filtered_builders_and_tests
= dict((bot
, set(builders_and_tests
[bot
]))
1607 filtered_builders_and_tests
= dict(
1609 for bot
, tests
in builders_and_tests
.iteritems())
1611 # Build up the mapping from tryserver master to bot/test.
1613 for bot
, tests
in filtered_builders_and_tests
.iteritems():
1614 out
.setdefault(GetTryServerMasterForBot(bot
), {})[bot
] = tests
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit entry point for commit; adds tree/bug/description checks."""
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # TODO(thestig) temporarily disabled, doesn't work in third_party/
  #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
  #    input_api, output_api, sources))
  # Make sure the tree is 'open'.
  results.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api,
      output_api,
      json_url='http://chromium-status.appspot.com/current?format=json'))

  results.extend(input_api.canned_checks.CheckChangeHasBugField(
      input_api, output_api))
  results.extend(input_api.canned_checks.CheckChangeHasDescription(
      input_api, output_api))
  results.extend(_CheckSubversionConfig(input_api, output_api))
  return results
1638 def GetPreferredTryMasters(project
, change
):
1639 files
= change
.LocalPaths()
1641 if not files
or all(re
.search(r
'[\\/]OWNERS$', f
) for f
in files
):
1644 if all(re
.search('\.(m|mm)$|(^|[/_])mac[/_.]', f
) for f
in files
):
1645 return GetDefaultTryConfigs([
1646 'mac_chromium_compile_dbg',
1649 if all(re
.search('(^|[/_])win[/_.]', f
) for f
in files
):
1650 return GetDefaultTryConfigs(['win_chromium_dbg', 'win_chromium_rel'])
1651 if all(re
.search('(^|[/_])android[/_.]', f
) for f
in files
):
1652 return GetDefaultTryConfigs([
1654 'android_clang_dbg',
1657 if all(re
.search('[/_]ios[/_.]', f
) for f
in files
):
1658 return GetDefaultTryConfigs(['ios_rel_device', 'ios_dbg_simulator'])
1661 'android_chromium_gn_compile_rel',
1662 'android_clang_dbg',
1664 'ios_dbg_simulator',
1666 'linux_chromium_chromeos_rel',
1667 'linux_chromium_clang_dbg',
1668 'linux_chromium_gn_rel',
1669 'linux_chromium_rel',
1671 'mac_chromium_compile_dbg',
1674 'win_chromium_compile_dbg',
1676 'win_chromium_x64_rel',
1680 # Match things like path/aura/file.cc and path/file_aura.cc.
1681 # Same for chromeos.
1682 if any(re
.search('[/_](aura|chromeos)', f
) for f
in files
):
1684 'linux_chromeos_asan',
1685 'linux_chromium_chromeos_clang_dbg'
1688 # If there are gyp changes to base, build, or chromeos, run a full cros build
1689 # in addition to the shorter linux_chromeos build. Changes to high level gyp
1690 # files have a much higher chance of breaking the cros build, which is
1691 # differnt from the linux_chromeos build that most chrome developers test
1693 if any(re
.search('^(base|build|chromeos).*\.gypi?$', f
) for f
in files
):
1694 builders
.extend(['cros_x86'])
1696 # The AOSP bot doesn't build the chrome/ layer, so ignore any changes to it
1697 # unless they're .gyp(i) files as changes to those files can break the gyp
1699 if (not all(re
.search('^chrome', f
) for f
in files
) or
1700 any(re
.search('\.gypi?$', f
) for f
in files
)):
1701 builders
.extend(['android_aosp'])
1703 return GetDefaultTryConfigs(builders
)