1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into gcl.
9 """
12 import re
13 import sys
16 _EXCLUDED_PATHS = (
17 r"^breakpad[\\\/].*",
18 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
19 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
20 r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
21 r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
22 r"^skia[\\\/].*",
23 r"^v8[\\\/].*",
24 r".*MakeFile$",
25 r".+_autogen\.h$",
26 r".+[\\\/]pnacl_shim\.c$",
27 r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
28 r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js"
31 # The TestRunner and NetscapePlugIn libraries are temporarily excluded from
32 # pan-project checks until they are transitioned to Chromium coding style.
33 _TESTRUNNER_PATHS = (
34 r"^content[\\\/]shell[\\\/]renderer[\\\/]test_runner[\\\/].*",
35 r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
38 # Fragment of a regular expression that matches C++ and Objective-C++
39 # implementation files.
40 _IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
42 # Regular expression that matches code only used for test binaries
43 # (best effort).
44 _TEST_CODE_EXCLUDED_PATHS = (
45 r'.*[/\\](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
46 r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
47 r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
48 _IMPLEMENTATION_EXTENSIONS,
49 r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
50 r'.*[/\\](test|tool(s)?)[/\\].*',
51 # content_shell is used for running layout tests.
52 r'content[/\\]shell[/\\].*',
53 # At request of folks maintaining this folder.
54 r'chrome[/\\]browser[/\\]automation[/\\].*',
55 # Non-production example code.
56 r'mojo[/\\]examples[/\\].*',
57 )
59 _TEST_ONLY_WARNING = (
60 'You might be calling functions intended only for testing from\n'
61 'production code. It is OK to ignore this warning if you know what\n'
62 'you are doing, as the heuristics used to detect the situation are\n'
63 'not perfect. The commit queue will not block on this warning.')
66 _INCLUDE_ORDER_WARNING = (
67 'Your #include order seems to be broken. Send mail to\n'
68 'marja@chromium.org if this is not the case.')
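# Each entry in the two tables below is consumed by _CheckNoBannedFunctions as
# (pattern, message, treat_as_error[, excluded_paths]): |pattern| is a plain
# substring, or a regular expression when prefixed with '/'; |message| is a
# tuple of lines to print; |treat_as_error| selects error vs. warning; the C++
# table's |excluded_paths| lists path regexps that are exempt from the check.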
71 _BANNED_OBJC_FUNCTIONS = (
72 (
73 'addTrackingRect:',
74 (
75 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
76 ' prohibited. Please use CrTrackingArea instead.',
77 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
78 ),
79 False,
80 ),
81 (
82 r'/NSTrackingArea\W',
83 (
84 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
85 'instead.',
86 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
87 ),
88 False,
89 ),
90 (
91 'convertPointFromBase:',
92 (
93 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
94 'Please use |convertPoint:(point) fromView:nil| instead.',
95 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
96 ),
97 True,
98 ),
99 (
100 'convertPointToBase:',
101 (
102 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
103 'Please use |convertPoint:(point) toView:nil| instead.',
104 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
105 ),
106 True,
107 ),
108 (
109 'convertRectFromBase:',
110 (
111 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
112 'Please use |convertRect:(point) fromView:nil| instead.',
113 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
114 ),
115 True,
116 ),
117 (
118 'convertRectToBase:',
119 (
120 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
121 'Please use |convertRect:(point) toView:nil| instead.',
122 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
123 ),
124 True,
125 ),
126 (
127 'convertSizeFromBase:',
128 (
129 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
130 'Please use |convertSize:(point) fromView:nil| instead.',
131 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
132 ),
133 True,
134 ),
135 (
136 'convertSizeToBase:',
137 (
138 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
139 'Please use |convertSize:(point) toView:nil| instead.',
140 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
141 ),
142 True,
143 ),
144 )
147 _BANNED_CPP_FUNCTIONS = (
148 # Make sure that gtest's FRIEND_TEST() macro is not used; the
149 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
150 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
151 (
152 'FRIEND_TEST(',
153 (
154 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
155 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
156 ),
157 False,
158 (),
159 ),
160 (
161 'ScopedAllowIO',
162 (
163 'New code should not use ScopedAllowIO. Post a task to the blocking',
164 'pool or the FILE thread instead.',
165 ),
166 True,
167 (
168 r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_loader\.cc$",
169 r"^components[\\\/]breakpad[\\\/]app[\\\/]breakpad_mac\.mm$",
170 r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
171 r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
172 r"^mojo[\\\/]system[\\\/]raw_shared_buffer_posix\.cc$",
173 r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
174 r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
175 ),
176 ),
177 (
178 'SkRefPtr',
179 (
180 'The use of SkRefPtr is prohibited. ',
181 'Please use skia::RefPtr instead.'
182 ),
183 True,
184 (),
185 ),
186 (
187 'SkAutoRef',
188 (
189 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
190 'Please use skia::RefPtr instead.'
191 ),
192 True,
193 (),
194 ),
195 (
196 'SkAutoTUnref',
197 (
198 'The use of SkAutoTUnref is dangerous because it implicitly ',
199 'converts to a raw pointer. Please use skia::RefPtr instead.'
200 ),
201 True,
202 (),
203 ),
204 (
205 'SkAutoUnref',
206 (
207 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
208 'because it implicitly converts to a raw pointer. ',
209 'Please use skia::RefPtr instead.'
210 ),
211 True,
212 (),
213 ),
214 (
215 r'/HANDLE_EINTR\(.*close',
216 (
217 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
218 'descriptor will be closed, and it is incorrect to retry the close.',
219 'Either call close directly and ignore its return value, or wrap close',
220 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
221 ),
222 True,
223 (),
224 ),
225 (
226 r'/IGNORE_EINTR\((?!.*close)',
227 (
228 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
229 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
230 ),
231 True,
232 (
233 # Files that #define IGNORE_EINTR.
234 r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
235 r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
236 ),
237 ),
238 (
239 r'/v8::Extension\(',
240 (
241 'Do not introduce new v8::Extensions into the code base, use',
242 'gin::Wrappable instead. See http://crbug.com/334679',
243 ),
244 True,
245 (
246 r'extensions[/\\]renderer[/\\]safe_builtins\.*',
247 ),
248 ),
249 )
252 _VALID_OS_MACROS = (
253 # Please keep sorted.
254 'OS_ANDROID',
255 'OS_ANDROID_HOST',
256 'OS_BSD',
257 'OS_CAT', # For testing.
258 'OS_CHROMEOS',
259 'OS_FREEBSD',
260 'OS_IOS',
261 'OS_LINUX',
262 'OS_MACOSX',
263 'OS_NACL',
264 'OS_OPENBSD',
265 'OS_POSIX',
266 'OS_QNX',
267 'OS_SOLARIS',
268 'OS_WIN',
269 )
272 def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
273 """Attempts to prevent use of functions intended only for testing in
274 non-testing code. For now this is just a best-effort implementation
275 that ignores header files and may have some false positives. A
276 better implementation would probably need a proper C++ parser.
277 """
278 # We only scan .cc files and the like, as the declaration of
279 # for-testing functions in header files are hard to distinguish from
280 # calls to such functions without a proper C++ parser.
281 file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
283 base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
284 inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
285 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
286 exclusion_pattern = input_api.re.compile(
287 r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
288 base_function_pattern, base_function_pattern))
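# Illustrative, hypothetical examples of how the patterns above interact:
#   helper->ResetCountForTesting();          // reported (inclusion_pattern)
#   // helper->ResetCountForTesting();       // skipped  (comment_pattern)
#   void Helper::ResetCountForTesting() {    // skipped  (exclusion_pattern)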
290 def FilterFile(affected_file):
291 black_list = (_EXCLUDED_PATHS +
292 _TEST_CODE_EXCLUDED_PATHS +
293 input_api.DEFAULT_BLACK_LIST)
294 return input_api.FilterSourceFile(
295 affected_file,
296 white_list=(file_inclusion_pattern, ),
297 black_list=black_list)
299 problems = []
300 for f in input_api.AffectedSourceFiles(FilterFile):
301 local_path = f.LocalPath()
302 for line_number, line in f.ChangedContents():
303 if (inclusion_pattern.search(line) and
304 not comment_pattern.search(line) and
305 not exclusion_pattern.search(line)):
306 problems.append(
307 '%s:%d\n %s' % (local_path, line_number, line.strip()))
309 if problems:
310 return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
311 else:
312 return []
315 def _CheckNoIOStreamInHeaders(input_api, output_api):
316 """Checks to make sure no .h files include <iostream>."""
317 files = []
318 pattern = input_api.re.compile(r'^#include\s*<iostream>',
319 input_api.re.MULTILINE)
320 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
321 if not f.LocalPath().endswith('.h'):
322 continue
323 contents = input_api.ReadFile(f)
324 if pattern.search(contents):
325 files.append(f)
327 if len(files):
328 return [ output_api.PresubmitError(
329 'Do not #include <iostream> in header files, since it inserts static '
330 'initialization into every file including the header. Instead, '
331 '#include <ostream>. See http://crbug.com/94794',
332 files) ]
333 return []
336 def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
337 """Checks to make sure no source files use UNIT_TEST"""
338 problems = []
339 for f in input_api.AffectedFiles():
340 if (not f.LocalPath().endswith(('.cc', '.mm'))):
341 continue
343 for line_num, line in f.ChangedContents():
344 if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
345 problems.append(' %s:%d' % (f.LocalPath(), line_num))
347 if not problems:
348 return []
349 return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
350 '\n'.join(problems))]
353 def _CheckNoNewWStrings(input_api, output_api):
354 """Checks to make sure we don't introduce use of wstrings."""
355 problems = []
356 for f in input_api.AffectedFiles():
357 if (not f.LocalPath().endswith(('.cc', '.h')) or
358 f.LocalPath().endswith(('test.cc', '_win.cc', '_win.h'))):
359 continue
361 allowWString = False
362 for line_num, line in f.ChangedContents():
363 if 'presubmit: allow wstring' in line:
364 allowWString = True
365 elif not allowWString and 'wstring' in line:
366 problems.append(' %s:%d' % (f.LocalPath(), line_num))
367 allowWString = False
368 else:
369 allowWString = False
371 if not problems:
372 return []
373 return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
374 ' If you are calling a cross-platform API that accepts a wstring, '
375 'fix the API.\n' +
376 '\n'.join(problems))]
379 def _CheckNoDEPSGIT(input_api, output_api):
380 """Make sure .DEPS.git is never modified manually."""
381 if any(f.LocalPath().endswith('.DEPS.git') for f in
382 input_api.AffectedFiles()):
383 return [output_api.PresubmitError(
384 'Never commit changes to .DEPS.git. This file is maintained by an\n'
385 'automated system based on what\'s in DEPS and your changes will be\n'
386 'overwritten.\n'
387 'See http://code.google.com/p/chromium/wiki/UsingNewGit#Rolling_DEPS\n'
388 'for more information')]
389 return []
392 def _CheckNoBannedFunctions(input_api, output_api):
393 """Make sure that banned functions are not used."""
394 warnings = []
395 errors = []
397 file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
398 for f in input_api.AffectedFiles(file_filter=file_filter):
399 for line_num, line in f.ChangedContents():
400 for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
401 matched = False
402 if func_name[0:1] == '/':
403 regex = func_name[1:]
404 if input_api.re.search(regex, line):
405 matched = True
406 elif func_name in line:
407 matched = True
408 if matched:
409 problems = warnings;
410 if error:
411 problems = errors;
412 problems.append(' %s:%d:' % (f.LocalPath(), line_num))
413 for message_line in message:
414 problems.append(' %s' % message_line)
416 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
417 for f in input_api.AffectedFiles(file_filter=file_filter):
418 for line_num, line in f.ChangedContents():
419 for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
420 def IsBlacklisted(affected_file, blacklist):
421 local_path = affected_file.LocalPath()
422 for item in blacklist:
423 if input_api.re.match(item, local_path):
424 return True
425 return False
426 if IsBlacklisted(f, excluded_paths):
427 continue
428 matched = False
429 if func_name[0:1] == '/':
430 regex = func_name[1:]
431 if input_api.re.search(regex, line):
432 matched = True
433 elif func_name in line:
434 matched = True
435 if matched:
436 problems = warnings;
437 if error:
438 problems = errors;
439 problems.append(' %s:%d:' % (f.LocalPath(), line_num))
440 for message_line in message:
441 problems.append(' %s' % message_line)
443 result = []
444 if (warnings):
445 result.append(output_api.PresubmitPromptWarning(
446 'Banned functions were used.\n' + '\n'.join(warnings)))
447 if (errors):
448 result.append(output_api.PresubmitError(
449 'Banned functions were used.\n' + '\n'.join(errors)))
450 return result
453 def _CheckNoPragmaOnce(input_api, output_api):
454 """Make sure that banned functions are not used."""
455 files = []
456 pattern = input_api.re.compile(r'^#pragma\s+once',
457 input_api.re.MULTILINE)
458 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
459 if not f.LocalPath().endswith('.h'):
460 continue
461 contents = input_api.ReadFile(f)
462 if pattern.search(contents):
463 files.append(f)
465 if files:
466 return [output_api.PresubmitError(
467 'Do not use #pragma once in header files.\n'
468 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
469 files)]
470 return []
473 def _CheckNoTrinaryTrueFalse(input_api, output_api):
474 """Checks to make sure we don't introduce use of foo ? true : false."""
475 problems = []
476 pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
477 for f in input_api.AffectedFiles():
478 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
479 continue
481 for line_num, line in f.ChangedContents():
482 if pattern.match(line):
483 problems.append(' %s:%d' % (f.LocalPath(), line_num))
485 if not problems:
486 return []
487 return [output_api.PresubmitPromptWarning(
488 'Please consider avoiding the "? true : false" pattern if possible.\n' +
489 '\n'.join(problems))]
492 def _CheckUnwantedDependencies(input_api, output_api):
493 """Runs checkdeps on #include statements added in this
494 change. Breaking - rules is an error, breaking ! rules is a
495 warning.
496 """
497 # We need to wait until we have an input_api object and use this
498 # roundabout construct to import checkdeps because this file is
499 # eval-ed and thus doesn't have __file__.
500 original_sys_path = sys.path
501 try:
502 sys.path = sys.path + [input_api.os_path.join(
503 input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
504 import checkdeps
505 from cpp_checker import CppChecker
506 from rules import Rule
507 finally:
508 # Restore sys.path to what it was before.
509 sys.path = original_sys_path
511 added_includes = []
512 for f in input_api.AffectedFiles():
513 if not CppChecker.IsCppFile(f.LocalPath()):
514 continue
516 changed_lines = [line for line_num, line in f.ChangedContents()]
517 added_includes.append([f.LocalPath(), changed_lines])
519 deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
521 error_descriptions = []
522 warning_descriptions = []
523 for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
524 added_includes):
525 description_with_path = '%s\n %s' % (path, rule_description)
526 if rule_type == Rule.DISALLOW:
527 error_descriptions.append(description_with_path)
528 else:
529 warning_descriptions.append(description_with_path)
531 results = []
532 if error_descriptions:
533 results.append(output_api.PresubmitError(
534 'You added one or more #includes that violate checkdeps rules.',
535 error_descriptions))
536 if warning_descriptions:
537 results.append(output_api.PresubmitPromptOrNotify(
538 'You added one or more #includes of files that are temporarily\n'
539 'allowed but being removed. Can you avoid introducing the\n'
540 '#include? See relevant DEPS file(s) for details and contacts.',
541 warning_descriptions))
542 return results
545 def _CheckFilePermissions(input_api, output_api):
546 """Check that all files have their permissions properly set."""
547 if input_api.platform == 'win32':
548 return []
549 args = [sys.executable, 'tools/checkperms/checkperms.py', '--root',
550 input_api.change.RepositoryRoot()]
551 for f in input_api.AffectedFiles():
552 args += ['--file', f.LocalPath()]
553 checkperms = input_api.subprocess.Popen(args,
554 stdout=input_api.subprocess.PIPE)
555 errors = checkperms.communicate()[0].strip()
556 if errors:
557 return [output_api.PresubmitError('checkperms.py failed.',
558 errors.splitlines())]
559 return []
562 def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
563 """Makes sure we don't include ui/aura/window_property.h
564 in header files.
565 """
566 pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
567 errors = []
568 for f in input_api.AffectedFiles():
569 if not f.LocalPath().endswith('.h'):
570 continue
571 for line_num, line in f.ChangedContents():
572 if pattern.match(line):
573 errors.append(' %s:%d' % (f.LocalPath(), line_num))
575 results = []
576 if errors:
577 results.append(output_api.PresubmitError(
578 'Header files should not include ui/aura/window_property.h', errors))
579 return results
582 def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
583 """Checks that the lines in scope occur in the right order.
585 1. C system files in alphabetical order
586 2. C++ system files in alphabetical order
587 3. Project's .h files
588 """
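# An include block the state machine below accepts (hypothetical headers):
#   #include <stddef.h>         // 1. C system includes, alphabetical
#   #include <map>              // 2. C++ system includes, alphabetical
#   #include <string>
#   #include "base/logging.h"   // 3. project includes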
590 c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
591 cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
592 custom_include_pattern = input_api.re.compile(r'\s*#include ".*')
594 C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)
596 state = C_SYSTEM_INCLUDES
598 previous_line = ''
599 previous_line_num = 0
600 problem_linenums = []
601 for line_num, line in scope:
602 if c_system_include_pattern.match(line):
603 if state != C_SYSTEM_INCLUDES:
604 problem_linenums.append((line_num, previous_line_num))
605 elif previous_line and previous_line > line:
606 problem_linenums.append((line_num, previous_line_num))
607 elif cpp_system_include_pattern.match(line):
608 if state == C_SYSTEM_INCLUDES:
609 state = CPP_SYSTEM_INCLUDES
610 elif state == CUSTOM_INCLUDES:
611 problem_linenums.append((line_num, previous_line_num))
612 elif previous_line and previous_line > line:
613 problem_linenums.append((line_num, previous_line_num))
614 elif custom_include_pattern.match(line):
615 if state != CUSTOM_INCLUDES:
616 state = CUSTOM_INCLUDES
617 elif previous_line and previous_line > line:
618 problem_linenums.append((line_num, previous_line_num))
619 else:
620 problem_linenums.append(line_num)
621 previous_line = line
622 previous_line_num = line_num
624 warnings = []
625 for (line_num, previous_line_num) in problem_linenums:
626 if line_num in changed_linenums or previous_line_num in changed_linenums:
627 warnings.append(' %s:%d' % (file_path, line_num))
628 return warnings
631 def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
632 """Checks the #include order for the given file f."""
634 system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
635 # Exclude the following includes from the check:
636 # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
637 # specific order.
638 # 2) <atlbase.h>, "build/build_config.h"
639 excluded_include_pattern = input_api.re.compile(
640 r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
641 custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
642 # Match the final or penultimate token if it is xxxtest so we can ignore it
643 # when considering the special first include.
644 test_file_tag_pattern = input_api.re.compile(
645 r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
646 if_pattern = input_api.re.compile(
647 r'\s*#\s*(if|elif|else|endif|define|undef).*')
648 # Some files need specialized order of includes; exclude such files from this
649 # check.
650 uncheckable_includes_pattern = input_api.re.compile(
651 r'\s*#include '
652 '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')
654 contents = f.NewContents()
655 warnings = []
656 line_num = 0
658 # Handle the special first include. If the first include file is
659 # some/path/file.h, the corresponding including file can be some/path/file.cc,
660 # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
661 # etc. It's also possible that no special first include exists.
662 # If the included file is some/path/file_platform.h the including file could
663 # also be some/path/file_xxxtest_platform.h.
664 including_file_base_name = test_file_tag_pattern.sub(
665 '', input_api.os_path.basename(f.LocalPath()))
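# For example (hypothetical paths): for some/path/foo_unittest.cc the tag
# pattern strips "_unittest", so "some/path/foo.h" qualifies as the special
# first include, just as it would for some/path/foo.cc.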
667 for line in contents:
668 line_num += 1
669 if system_include_pattern.match(line):
670 # No special first include -> process the line again along with normal
671 # includes.
672 line_num -= 1
673 break
674 match = custom_include_pattern.match(line)
675 if match:
676 match_dict = match.groupdict()
677 header_basename = test_file_tag_pattern.sub(
678 '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')
680 if header_basename not in including_file_base_name:
681 # No special first include -> process the line again along with normal
682 # includes.
683 line_num -= 1
684 break
686 # Split into scopes: Each region between #if and #endif is its own scope.
687 scopes = []
688 current_scope = []
689 for line in contents[line_num:]:
690 line_num += 1
691 if uncheckable_includes_pattern.match(line):
692 continue
693 if if_pattern.match(line):
694 scopes.append(current_scope)
695 current_scope = []
696 elif ((system_include_pattern.match(line) or
697 custom_include_pattern.match(line)) and
698 not excluded_include_pattern.match(line)):
699 current_scope.append((line_num, line))
700 scopes.append(current_scope)
702 for scope in scopes:
703 warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
704 changed_linenums))
705 return warnings
708 def _CheckIncludeOrder(input_api, output_api):
709 """Checks that the #include order is correct.
711 1. The corresponding header for source files.
712 2. C system files in alphabetical order
713 3. C++ system files in alphabetical order
714 4. Project's .h files in alphabetical order
716 Each region separated by #if, #elif, #else, #endif, #define and #undef follows
717 these rules separately.
718 """
720 warnings = []
721 for f in input_api.AffectedFiles():
722 if f.LocalPath().endswith(('.cc', '.h')):
723 changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
724 warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))
726 results = []
727 if warnings:
728 results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
729 warnings))
730 return results
733 def _CheckForVersionControlConflictsInFile(input_api, f):
734 pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
735 errors = []
736 for line_num, line in f.ChangedContents():
737 if pattern.match(line):
738 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
739 return errors
742 def _CheckForVersionControlConflicts(input_api, output_api):
743 """Usually this is not intentional and will cause a compile failure."""
744 errors = []
745 for f in input_api.AffectedFiles():
746 errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))
748 results = []
749 if errors:
750 results.append(output_api.PresubmitError(
751 'Version control conflict markers found, please resolve.', errors))
752 return results
755 def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
756 def FilterFile(affected_file):
757 """Filter function for use with input_api.AffectedSourceFiles,
758 below. This filters out everything except non-test files from
759 top-level directories that generally speaking should not hard-code
760 service URLs (e.g. src/android_webview/, src/content/ and others).
761 """
762 return input_api.FilterSourceFile(
763 affected_file,
764 white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
765 black_list=(_EXCLUDED_PATHS +
766 _TEST_CODE_EXCLUDED_PATHS +
767 input_api.DEFAULT_BLACK_LIST))
769 base_pattern = '"[^"]*google\.com[^"]*"'
770 comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
771 pattern = input_api.re.compile(base_pattern)
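# Illustrative, hypothetical example: a literal such as
#   GURL url("https://www.google.com/");
# in net/ or content/ code would be reported, while the same text appearing
# only after // on a line is skipped via |comment_pattern|.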
772 problems = [] # items are (filename, line_number, line)
773 for f in input_api.AffectedSourceFiles(FilterFile):
774 for line_num, line in f.ChangedContents():
775 if not comment_pattern.search(line) and pattern.search(line):
776 problems.append((f.LocalPath(), line_num, line))
778 if problems:
779 return [output_api.PresubmitPromptOrNotify(
780 'Most layers below src/chrome/ should not hardcode service URLs.\n'
781 'Are you sure this is correct?',
782 [' %s:%d: %s' % (
783 problem[0], problem[1], problem[2]) for problem in problems])]
784 else:
785 return []
788 def _CheckNoAbbreviationInPngFileName(input_api, output_api):
789 """Makes sure there are no abbreviations in the name of PNG files.
791 pattern = input_api.re.compile(r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$')
792 errors = []
793 for f in input_api.AffectedFiles(include_deletes=False):
794 if pattern.match(f.LocalPath()):
795 errors.append(' %s' % f.LocalPath())
797 results = []
798 if errors:
799 results.append(output_api.PresubmitError(
800 'The name of PNG files should not have abbreviations. \n'
801 'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
802 'Contact oshima@chromium.org if you have questions.', errors))
803 return results
806 def _FilesToCheckForIncomingDeps(re, changed_lines):
807 """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
808 a set of DEPS entries that we should look up.
810 For a directory (rather than a specific filename) we fake a path to
811 a specific filename by adding /DEPS. This is chosen as a file that
812 will seldom or never be subject to per-file include_rules.
813 """
814 # We ignore deps entries on auto-generated directories.
815 AUTO_GENERATED_DIRS = ['grit', 'jni']
817 # This pattern grabs the path without basename in the first
818 # parentheses, and the basename (if present) in the second. It
819 # relies on the simple heuristic that if there is a basename it will
820 # be a header file ending in ".h".
821 pattern = re.compile(
822 r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
823 results = set()
824 for changed_line in changed_lines:
825 m = pattern.match(changed_line)
826 if m:
827 path = m.group(1)
828 if path.split('/')[0] not in AUTO_GENERATED_DIRS:
829 if m.group(2):
830 results.add('%s%s' % (path, m.group(2)))
831 else:
832 results.add('%s/DEPS' % path)
833 return results
836 def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
837 """When a dependency prefixed with + is added to a DEPS file, we
838 want to make sure that the change is reviewed by an OWNER of the
839 target file or directory, to avoid layering violations from being
840 introduced. This check verifies that this happens.
841 """
842 changed_lines = set()
843 for f in input_api.AffectedFiles():
844 filename = input_api.os_path.basename(f.LocalPath())
845 if filename == 'DEPS':
846 changed_lines |= set(line.strip()
847 for line_num, line
848 in f.ChangedContents())
849 if not changed_lines:
850 return []
852 virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
853 changed_lines)
854 if not virtual_depended_on_files:
855 return []
857 if input_api.is_committing:
858 if input_api.tbr:
859 return [output_api.PresubmitNotifyResult(
860 '--tbr was specified, skipping OWNERS check for DEPS additions')]
861 if not input_api.change.issue:
862 return [output_api.PresubmitError(
863 "DEPS approval by OWNERS check failed: this change has "
864 "no Rietveld issue number, so we can't check it for approvals.")]
865 output = output_api.PresubmitError
866 else:
867 output = output_api.PresubmitNotifyResult
869 owners_db = input_api.owners_db
870 owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
871 input_api,
872 owners_db.email_regexp,
873 approval_needed=input_api.is_committing)
875 owner_email = owner_email or input_api.change.author_email
877 reviewers_plus_owner = set(reviewers)
878 if owner_email:
879 reviewers_plus_owner.add(owner_email)
880 missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
881 reviewers_plus_owner)
883 # We strip the /DEPS part that was added by
884 # _FilesToCheckForIncomingDeps to fake a path to a file in a
885 # directory.
886 def StripDeps(path):
887 start_deps = path.rfind('/DEPS')
888 if start_deps != -1:
889 return path[:start_deps]
890 else:
891 return path
892 unapproved_dependencies = ["'+%s'," % StripDeps(path)
893 for path in missing_files]
895 if unapproved_dependencies:
896 output_list = [
897 output('Missing LGTM from OWNERS of dependencies added to DEPS:\n %s' %
898 '\n '.join(sorted(unapproved_dependencies)))]
899 if not input_api.is_committing:
900 suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
901 output_list.append(output(
902 'Suggested missing target path OWNERS:\n %s' %
903 '\n '.join(suggested_owners or [])))
904 return output_list
906 return []
909 def _CheckSpamLogging(input_api, output_api):
910 file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
911 black_list = (_EXCLUDED_PATHS +
912 _TEST_CODE_EXCLUDED_PATHS +
913 input_api.DEFAULT_BLACK_LIST +
914 (r"^base[\\\/]logging\.h$",
915 r"^base[\\\/]logging\.cc$",
916 r"^cloud_print[\\\/]",
917 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
918 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
919 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
920 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
921 r"startup_browser_creator\.cc$",
922 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
923 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
924 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
925 r"gl_helper_benchmark\.cc$",
926 r"^native_client_sdk[\\\/]",
927 r"^remoting[\\\/]base[\\\/]logging\.h$",
928 r"^remoting[\\\/]host[\\\/].*",
929 r"^sandbox[\\\/]linux[\\\/].*",
930 r"^tools[\\\/]",
931 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",))
932 source_file_filter = lambda x: input_api.FilterSourceFile(
933 x, white_list=(file_inclusion_pattern,), black_list=black_list)
935 log_info = []
936 printf = []
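# Illustrative, hypothetical lines that would be flagged in a file not on the
# black_list above:
#   LOG(INFO) << "loaded";      // caught by the LOG(INFO) pattern
#   printf("%d\n", count);      // caught by the printf pattern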
938 for f in input_api.AffectedSourceFiles(source_file_filter):
939 contents = input_api.ReadFile(f, 'rb')
940 if re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
941 log_info.append(f.LocalPath())
942 elif re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
943 log_info.append(f.LocalPath())
945 if re.search(r"\bprintf\(", contents):
946 printf.append(f.LocalPath())
947 elif re.search(r"\bfprintf\((stdout|stderr)", contents):
948 printf.append(f.LocalPath())
950 if log_info:
951 return [output_api.PresubmitError(
952 'These files spam the console log with LOG(INFO):',
953 items=log_info)]
954 if printf:
955 return [output_api.PresubmitError(
956 'These files spam the console log with printf/fprintf:',
957 items=printf)]
958 return []
961 def _CheckForAnonymousVariables(input_api, output_api):
962 """These types are all expected to hold locks while in scope and
963 so should never be anonymous (which causes them to be immediately
964 destroyed)."""
965 they_who_must_be_named = [
966 'base::AutoLock',
967 'base::AutoReset',
968 'base::AutoUnlock',
969 'SkAutoAlphaRestore',
970 'SkAutoBitmapShaderInstall',
971 'SkAutoBlitterChoose',
972 'SkAutoBounderCommit',
973 'SkAutoCallProc',
974 'SkAutoCanvasRestore',
975 'SkAutoCommentBlock',
976 'SkAutoDescriptor',
977 'SkAutoDisableDirectionCheck',
978 'SkAutoDisableOvalCheck',
979 'SkAutoFree',
980 'SkAutoGlyphCache',
981 'SkAutoHDC',
982 'SkAutoLockColors',
983 'SkAutoLockPixels',
984 'SkAutoMalloc',
985 'SkAutoMaskFreeImage',
986 'SkAutoMutexAcquire',
987 'SkAutoPathBoundsUpdate',
988 'SkAutoPDFRelease',
989 'SkAutoRasterClipValidate',
990 'SkAutoRef',
991 'SkAutoTime',
992 'SkAutoTrace',
993 'SkAutoUnref',
994 ]
995 anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
996 # bad: base::AutoLock(lock.get());
997 # not bad: base::AutoLock lock(lock.get());
998 bad_pattern = input_api.re.compile(anonymous)
999 # good: new base::AutoLock(lock.get())
1000 good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
1001 errors = []
1003 for f in input_api.AffectedFiles():
1004 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
1005 continue
1006 for linenum, line in f.ChangedContents():
1007 if bad_pattern.search(line) and not good_pattern.search(line):
1008 errors.append('%s:%d' % (f.LocalPath(), linenum))
1010 if errors:
1011 return [output_api.PresubmitError(
1012 'These lines create anonymous variables that need to be named:',
1013 items=errors)]
1014 return []
1017 def _CheckCygwinShell(input_api, output_api):
1018 source_file_filter = lambda x: input_api.FilterSourceFile(
1019 x, white_list=(r'.+\.(gyp|gypi)$',))
1020 cygwin_shell = []
1022 for f in input_api.AffectedSourceFiles(source_file_filter):
1023 for linenum, line in f.ChangedContents():
1024 if 'msvs_cygwin_shell' in line:
1025 cygwin_shell.append(f.LocalPath())
1026 break
1028 if cygwin_shell:
1029 return [output_api.PresubmitError(
1030 'These files should not use msvs_cygwin_shell (the default is 0):',
1031 items=cygwin_shell)]
1032 return []
1035 def _CheckUserActionUpdate(input_api, output_api):
1036 """Checks if any new user action has been added."""
1037 if any('actions.xml' == input_api.os_path.basename(f) for f in
1038 input_api.LocalPaths()):
1039 # If actions.xml is already included in the changelist, the PRESUBMIT
1040 # for actions.xml will do a more complete presubmit check.
1041 return []
1043 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
1044 action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
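# Illustrative, hypothetical match: a line such as
#   content::RecordAction(UserMetricsAction("MobileMenuShow"));
# captures "MobileMenuShow", which must already appear as
# name="MobileMenuShow" in tools/metrics/actions/actions.xml.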
1045 current_actions = None
1046 for f in input_api.AffectedFiles(file_filter=file_filter):
1047 for line_num, line in f.ChangedContents():
1048 match = input_api.re.search(action_re, line)
1049 if match:
1050 # Loads contents in tools/metrics/actions/actions.xml to memory. It's
1051 # loaded only once.
1052 if not current_actions:
1053 with open('tools/metrics/actions/actions.xml') as actions_f:
1054 current_actions = actions_f.read()
1055 # Search for the matched user action name in |current_actions|.
1056 for action_name in match.groups():
1057 action = 'name="{0}"'.format(action_name)
1058 if action not in current_actions:
1059 return [output_api.PresubmitPromptWarning(
1060 'File %s line %d: %s is missing in '
1061 'tools/metrics/actions/actions.xml. Please run '
1062 'tools/metrics/actions/extract_actions.py to update.'
1063 % (f.LocalPath(), line_num, action_name))]
1064 return []
1067 def _GetJSONParseError(input_api, filename, eat_comments=True):
1068 try:
1069 contents = input_api.ReadFile(filename)
1070 if eat_comments:
1071 json_comment_eater = input_api.os_path.join(
1072 input_api.PresubmitLocalPath(),
1073 'tools', 'json_comment_eater', 'json_comment_eater.py')
1074 process = input_api.subprocess.Popen(
1075 [input_api.python_executable, json_comment_eater],
1076 stdin=input_api.subprocess.PIPE,
1077 stdout=input_api.subprocess.PIPE,
1078 universal_newlines=True)
1079 (contents, _) = process.communicate(input=contents)
1081 input_api.json.loads(contents)
1082 except ValueError as e:
1083 return e
1084 return None
1087 def _GetIDLParseError(input_api, filename):
1088 try:
1089 contents = input_api.ReadFile(filename)
1090 idl_schema = input_api.os_path.join(
1091 input_api.PresubmitLocalPath(),
1092 'tools', 'json_schema_compiler', 'idl_schema.py')
1093 process = input_api.subprocess.Popen(
1094 [input_api.python_executable, idl_schema],
1095 stdin=input_api.subprocess.PIPE,
1096 stdout=input_api.subprocess.PIPE,
1097 stderr=input_api.subprocess.PIPE,
1098 universal_newlines=True)
1099 (_, error) = process.communicate(input=contents)
1100 return error or None
1101 except ValueError as e:
1102 return e
1105 def _CheckParseErrors(input_api, output_api):
1106 """Check that IDL and JSON files do not contain syntax errors."""
1107 actions = {
1108 '.idl': _GetIDLParseError,
1109 '.json': _GetJSONParseError,
1110 }
1111 # These paths contain test data and other known invalid JSON files.
1112 excluded_patterns = [
1113 'test/data/',
1114 '^components/policy/resources/policy_templates.json$',
1115 ]
1116 # Most JSON files are preprocessed and support comments, but these do not.
1117 json_no_comments_patterns = [
1118 '^testing/',
1119 ]
1120 # Only run IDL checker on files in these directories.
1121 idl_included_patterns = [
1122 '^chrome/common/extensions/api/',
1123 '^extensions/common/api/',
1124 ]
1126 def get_action(affected_file):
1127 filename = affected_file.LocalPath()
1128 return actions.get(input_api.os_path.splitext(filename)[1])
1130 def MatchesFile(patterns, path):
1131 for pattern in patterns:
1132 if input_api.re.search(pattern, path):
1133 return True
1134 return False
1136 def FilterFile(affected_file):
1137 action = get_action(affected_file)
1138 if not action:
1139 return False
1140 path = affected_file.LocalPath()
1142 if MatchesFile(excluded_patterns, path):
1143 return False
1145 if (action == _GetIDLParseError and
1146 not MatchesFile(idl_included_patterns, path)):
1147 return False
1148 return True
1150 results = []
1151 for affected_file in input_api.AffectedFiles(
1152 file_filter=FilterFile, include_deletes=False):
1153 action = get_action(affected_file)
1154 kwargs = {}
1155 if (action == _GetJSONParseError and
1156 MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
1157 kwargs['eat_comments'] = False
1158 parse_error = action(input_api,
1159 affected_file.AbsoluteLocalPath(),
1160 **kwargs)
1161 if parse_error:
1162 results.append(output_api.PresubmitError('%s could not be parsed: %s' %
1163 (affected_file.LocalPath(), parse_error)))
1164 return results
1167 def _CheckJavaStyle(input_api, output_api):
1168 """Runs checkstyle on changed java files and returns errors if any exist."""
1169 original_sys_path = sys.path
1170 try:
1171 sys.path = sys.path + [input_api.os_path.join(
1172 input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
1173 import checkstyle
1174 finally:
1175 # Restore sys.path to what it was before.
1176 sys.path = original_sys_path
1178 return checkstyle.RunCheckstyle(
1179 input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml')
1182 _DEPRECATED_CSS = [
1183 # Values
1184 ( "-webkit-box", "flex" ),
1185 ( "-webkit-inline-box", "inline-flex" ),
1186 ( "-webkit-flex", "flex" ),
1187 ( "-webkit-inline-flex", "inline-flex" ),
1188 ( "-webkit-min-content", "min-content" ),
1189 ( "-webkit-max-content", "max-content" ),
1191 # Properties
1192 ( "-webkit-background-clip", "background-clip" ),
1193 ( "-webkit-background-origin", "background-origin" ),
1194 ( "-webkit-background-size", "background-size" ),
1195 ( "-webkit-box-shadow", "box-shadow" ),
1197 # Functions
1198 ( "-webkit-gradient", "gradient" ),
1199 ( "-webkit-repeating-gradient", "repeating-gradient" ),
1200 ( "-webkit-linear-gradient", "linear-gradient" ),
1201 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
1202 ( "-webkit-radial-gradient", "radial-gradient" ),
1203 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
1206 def _CheckNoDeprecatedCSS(input_api, output_api):
1207 """ Make sure that we don't use deprecated CSS
1208 properties, functions or values. Our external
1209 documentation is ignored by the hooks as it
1210 needs to be consumed by WebKit. """
1211 results = []
1212 file_inclusion_pattern = (r".+\.css$")
1213 black_list = (_EXCLUDED_PATHS +
1214 _TEST_CODE_EXCLUDED_PATHS +
1215 input_api.DEFAULT_BLACK_LIST +
1216 (r"^chrome/common/extensions/docs",
1217 r"^chrome/docs",
1218 r"^native_client_sdk"))
1219 file_filter = lambda f: input_api.FilterSourceFile(
1220 f, white_list=file_inclusion_pattern, black_list=black_list)
1221 for fpath in input_api.AffectedFiles(file_filter=file_filter):
1222 for line_num, line in fpath.ChangedContents():
1223 for (deprecated_value, value) in _DEPRECATED_CSS:
1224 if input_api.re.search(deprecated_value, line):
1225 results.append(output_api.PresubmitError(
1226 "%s:%d: Use of deprecated CSS %s, use %s instead" %
1227 (fpath.LocalPath(), line_num, deprecated_value, value)))
1228 return results
1230 def _CommonChecks(input_api, output_api):
1231 """Checks common to both upload and commit."""
1232 results = []
1233 results.extend(input_api.canned_checks.PanProjectChecks(
1234 input_api, output_api,
1235 excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
1236 results.extend(_CheckAuthorizedAuthor(input_api, output_api))
1237 results.extend(
1238 _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
1239 results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
1240 results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
1241 results.extend(_CheckNoNewWStrings(input_api, output_api))
1242 results.extend(_CheckNoDEPSGIT(input_api, output_api))
1243 results.extend(_CheckNoBannedFunctions(input_api, output_api))
1244 results.extend(_CheckNoPragmaOnce(input_api, output_api))
1245 results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
1246 results.extend(_CheckUnwantedDependencies(input_api, output_api))
1247 results.extend(_CheckFilePermissions(input_api, output_api))
1248 results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
1249 results.extend(_CheckIncludeOrder(input_api, output_api))
1250 results.extend(_CheckForVersionControlConflicts(input_api, output_api))
1251 results.extend(_CheckPatchFiles(input_api, output_api))
1252 results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
1253 results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
1254 results.extend(_CheckForInvalidOSMacros(input_api, output_api))
1255 results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
1256 results.extend(
1257 input_api.canned_checks.CheckChangeHasNoTabs(
1258 input_api,
1259 output_api,
1260 source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
1261 results.extend(_CheckSpamLogging(input_api, output_api))
1262 results.extend(_CheckForAnonymousVariables(input_api, output_api))
1263 results.extend(_CheckCygwinShell(input_api, output_api))
1264 results.extend(_CheckUserActionUpdate(input_api, output_api))
1265 results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
1266 results.extend(_CheckParseErrors(input_api, output_api))
1268 if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
1269 results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
1270 input_api, output_api,
1271 input_api.PresubmitLocalPath(),
1272 whitelist=[r'^PRESUBMIT_test\.py$']))
1273 return results
1276 def _CheckSubversionConfig(input_api, output_api):
1277 """Verifies the subversion config file is correctly setup.
1279 Checks that autoprops are enabled, returns an error otherwise.
1280 """
1281 join = input_api.os_path.join
1282 if input_api.platform == 'win32':
1283 appdata = input_api.environ.get('APPDATA', '')
1284 if not appdata:
1285 return [output_api.PresubmitError('%APPDATA% is not configured.')]
1286 path = join(appdata, 'Subversion', 'config')
1287 else:
1288 home = input_api.environ.get('HOME', '')
1289 if not home:
1290 return [output_api.PresubmitError('$HOME is not configured.')]
1291 path = join(home, '.subversion', 'config')
1293 error_msg = (
1294 'Please look at http://dev.chromium.org/developers/coding-style to\n'
1295 'configure your subversion configuration file. This enables automatic\n'
1296 'properties to simplify the project maintenance.\n'
1297 'Pro-tip: just download and install\n'
1298 'http://src.chromium.org/viewvc/chrome/trunk/tools/build/slave/config\n')
1300 try:
1301 lines = open(path, 'r').read().splitlines()
1302 # Make sure auto-props is enabled and check for 2 Chromium standard
1303 # auto-prop.
1304 if (not '*.cc = svn:eol-style=LF' in lines or
1305 not '*.pdf = svn:mime-type=application/pdf' in lines or
1306 not 'enable-auto-props = yes' in lines):
1307 return [
1308 output_api.PresubmitNotifyResult(
1309 'It looks like you have not configured your subversion config '
1310 'file or it is not up-to-date.\n' + error_msg)
1311 ]
1312 except (OSError, IOError):
1313 return [
1314 output_api.PresubmitNotifyResult(
1315 'Can\'t find your subversion config file.\n' + error_msg)
1316 ]
1317 return []
1320 def _CheckAuthorizedAuthor(input_api, output_api):
1321 """For non-googler/chromites committers, verify the author's email address is
1322 in AUTHORS.
1324 # TODO(maruel): Add it to input_api?
1325 import fnmatch
1327 author = input_api.change.author_email
1328 if not author:
1329 input_api.logging.info('No author, skipping AUTHOR check')
1330 return []
1331 authors_path = input_api.os_path.join(
1332 input_api.PresubmitLocalPath(), 'AUTHORS')
1333 valid_authors = (
1334 input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
1335 for line in open(authors_path))
1336 valid_authors = [item.group(1).lower() for item in valid_authors if item]
1337 if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
1338 input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
1339 return [output_api.PresubmitPromptWarning(
1340 ('%s is not in AUTHORS file. If you are a new contributor, please visit'
1341 '\n'
1342 'http://www.chromium.org/developers/contributing-code and read the '
1343 '"Legal" section\n'
1344 'If you are a chromite, verify the contributor signed the CLA.') %
1345 author)]
1346 return []
1349 def _CheckPatchFiles(input_api, output_api):
1350 problems = [f.LocalPath() for f in input_api.AffectedFiles()
1351 if f.LocalPath().endswith(('.orig', '.rej'))]
1352 if problems:
1353 return [output_api.PresubmitError(
1354 "Don't commit .rej and .orig files.", problems)]
1355 else:
1356 return []
1359 def _DidYouMeanOSMacro(bad_macro):
1360 try:
1361 return {'A': 'OS_ANDROID',
1362 'B': 'OS_BSD',
1363 'C': 'OS_CHROMEOS',
1364 'F': 'OS_FREEBSD',
1365 'L': 'OS_LINUX',
1366 'M': 'OS_MACOSX',
1367 'N': 'OS_NACL',
1368 'O': 'OS_OPENBSD',
1369 'P': 'OS_POSIX',
1370 'S': 'OS_SOLARIS',
1371 'W': 'OS_WIN'}[bad_macro[3].upper()]
1372 except KeyError:
1373 return ''
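# Illustrative example: "#if defined(OS_MAC)" uses a macro that is not in
# _VALID_OS_MACROS, so the check below reports it with the hint
# "did you mean OS_MACOSX?" (OS_MAC here is a hypothetical misspelling).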
1376 def _CheckForInvalidOSMacrosInFile(input_api, f):
1377 """Check for sensible looking, totally invalid OS macros."""
1378 preprocessor_statement = input_api.re.compile(r'^\s*#')
1379 os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
1380 results = []
1381 for lnum, line in f.ChangedContents():
1382 if preprocessor_statement.search(line):
1383 for match in os_macro.finditer(line):
1384 if not match.group(1) in _VALID_OS_MACROS:
1385 good = _DidYouMeanOSMacro(match.group(1))
1386 did_you_mean = ' (did you mean %s?)' % good if good else ''
1387 results.append(' %s:%d %s%s' % (f.LocalPath(),
1388 lnum,
1389 match.group(1),
1390 did_you_mean))
1391 return results
1394 def _CheckForInvalidOSMacros(input_api, output_api):
1395 """Check all affected files for invalid OS macros."""
1396 bad_macros = []
1397 for f in input_api.AffectedFiles():
1398 if not f.LocalPath().endswith(('.py', '.js', '.html', '.css')):
1399 bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))
1401 if not bad_macros:
1402 return []
1404 return [output_api.PresubmitError(
1405 'Possibly invalid OS macro[s] found. Please fix your code\n'
1406 'or add your macro to src/PRESUBMIT.py.', bad_macros)]
1409 def CheckChangeOnUpload(input_api, output_api):
1410 results = []
1411 results.extend(_CommonChecks(input_api, output_api))
1412 results.extend(_CheckJavaStyle(input_api, output_api))
1413 return results
1416 def GetTryServerMasterForBot(bot):
1417 """Returns the Try Server master for the given bot.
1419 Assumes that most Try Servers are on the tryserver.chromium master."""
1420 non_default_master_map = {
1421 'linux_gpu': 'tryserver.chromium.gpu',
1422 'mac_gpu': 'tryserver.chromium.gpu',
1423 'win_gpu': 'tryserver.chromium.gpu',
1424 }
1425 return non_default_master_map.get(bot, 'tryserver.chromium')
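# Illustrative usage:
#   GetTryServerMasterForBot('linux_gpu')  # -> 'tryserver.chromium.gpu'
#   GetTryServerMasterForBot('linux_rel')  # -> 'tryserver.chromium' (default)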
1428 def GetDefaultTryConfigs(bots=None):
1429 """Returns a list of ('bot', set(['tests']), optionally filtered by [bots].
1431 To add tests to this list, they MUST be in the the corresponding master's
1432 gatekeeper config. For example, anything on master.chromium would be closed by
1433 tools/build/masters/master.chromium/master_gatekeeper_cfg.py.
1435 If 'bots' is specified, will only return configurations for bots in that list.
1438 standard_tests = [
1439 'base_unittests',
1440 'browser_tests',
1441 'cacheinvalidation_unittests',
1442 'check_deps',
1443 'check_deps2git',
1444 'content_browsertests',
1445 'content_unittests',
1446 'crypto_unittests',
1447 'gpu_unittests',
1448 'interactive_ui_tests',
1449 'ipc_tests',
1450 'jingle_unittests',
1451 'media_unittests',
1452 'net_unittests',
1453 'ppapi_unittests',
1454 'printing_unittests',
1455 'sql_unittests',
1456 'sync_unit_tests',
1457 'unit_tests',
1458 # Broken in release.
1459 #'url_unittests',
1460 #'webkit_unit_tests',
1461 ]
1463 builders_and_tests = {
1464 # TODO(maruel): Figure out a way to run 'sizes' where people can
1465 # effectively update the perf expectation correctly. This requires a
1466 # clobber=True build running 'sizes'. 'sizes' is not accurate with
1467 # incremental build. Reference:
1468 # http://chromium.org/developers/tree-sheriffs/perf-sheriffs.
1469 # TODO(maruel): An option would be to run 'sizes' but not count a failure
1470 # of this step as a try job failure.
1471 'android_aosp': ['compile'],
1472 'android_chromium_gn_compile_rel': ['compile'],
1473 'android_clang_dbg': ['slave_steps'],
1474 'android_dbg': ['slave_steps'],
1475 'cros_x86': ['defaulttests'],
1476 'ios_dbg_simulator': [
1477 'compile',
1478 'base_unittests',
1479 'content_unittests',
1480 'crypto_unittests',
1481 'url_unittests',
1482 'net_unittests',
1483 'sql_unittests',
1484 'ui_unittests',
1485 ],
1486 'ios_rel_device': ['compile'],
1487 'linux_asan': ['compile'],
1488 'mac_asan': ['compile'],
1489 #TODO(stip): Change the name of this builder to reflect that it's release.
1490 'linux_gtk': standard_tests,
1491 'linux_chromeos_asan': ['compile'],
1492 'linux_chromium_chromeos_clang_dbg': ['defaulttests'],
1493 'linux_chromium_chromeos_rel': ['defaulttests'],
1494 'linux_chromium_compile_dbg': ['defaulttests'],
1495 'linux_chromium_gn_rel': ['defaulttests'],
1496 'linux_chromium_rel': ['defaulttests'],
1497 'linux_chromium_clang_dbg': ['defaulttests'],
1498 'linux_gpu': ['defaulttests'],
1499 'linux_nacl_sdk_build': ['compile'],
1500 'linux_rel': [
1501 'telemetry_perf_unittests',
1502 'telemetry_unittests',
1503 ],
1504 'mac_chromium_compile_dbg': ['defaulttests'],
1505 'mac_chromium_rel': ['defaulttests'],
1506 'mac_gpu': ['defaulttests'],
1507 'mac_nacl_sdk_build': ['compile'],
1508 'mac_rel': [
1509 'telemetry_perf_unittests',
1510 'telemetry_unittests',
1511 ],
1512 'win': ['compile'],
1513 'win_chromium_compile_dbg': ['defaulttests'],
1514 'win_chromium_dbg': ['defaulttests'],
1515 'win_chromium_rel': ['defaulttests'],
1516 'win_chromium_x64_rel': ['defaulttests'],
1517 'win_gpu': ['defaulttests'],
1518 'win_nacl_sdk_build': ['compile'],
1519 'win_rel': standard_tests + [
1520 'app_list_unittests',
1521 'ash_unittests',
1522 'aura_unittests',
1523 'cc_unittests',
1524 'chrome_elf_unittests',
1525 'chromedriver_unittests',
1526 'components_unittests',
1527 'compositor_unittests',
1528 'events_unittests',
1529 'gfx_unittests',
1530 'google_apis_unittests',
1531 'installer_util_unittests',
1532 'test_mini_installer',
1533 'nacl_integration',
1534 'remoting_unittests',
1535 'sync_integration_tests',
1536 'telemetry_perf_unittests',
1537 'telemetry_unittests',
1538 'views_unittests',
1539 ],
1540 'win_x64_rel': [
1541 'base_unittests',
1542 ],
1543 }
1545 swarm_enabled_builders = (
1546 # http://crbug.com/354263
1547 # 'linux_rel',
1548 # 'mac_rel',
1549 # 'win_rel',
1550 )
1552 swarm_enabled_tests = (
1553 'base_unittests',
1554 'browser_tests',
1555 'interactive_ui_tests',
1556 'net_unittests',
1557 'unit_tests',
1558 )
1560 for bot in builders_and_tests:
1561 if bot in swarm_enabled_builders:
1562 builders_and_tests[bot] = [x + '_swarm' if x in swarm_enabled_tests else x
1563 for x in builders_and_tests[bot]]
1565 if bots:
1566 filtered_builders_and_tests = dict((bot, set(builders_and_tests[bot]))
1567 for bot in bots)
1568 else:
1569 filtered_builders_and_tests = dict(
1570 (bot, set(tests))
1571 for bot, tests in builders_and_tests.iteritems())
1573 # Build up the mapping from tryserver master to bot/test.
1574 out = dict()
1575 for bot, tests in filtered_builders_and_tests.iteritems():
1576 out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
1577 return out
1580 def CheckChangeOnCommit(input_api, output_api):
1581 results = []
1582 results.extend(_CommonChecks(input_api, output_api))
1583 # TODO(thestig) temporarily disabled, doesn't work in third_party/
1584 #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
1585 # input_api, output_api, sources))
1586 # Make sure the tree is 'open'.
1587 results.extend(input_api.canned_checks.CheckTreeIsOpen(
1588 input_api,
1589 output_api,
1590 json_url='http://chromium-status.appspot.com/current?format=json'))
1592 results.extend(input_api.canned_checks.CheckChangeHasBugField(
1593 input_api, output_api))
1594 results.extend(input_api.canned_checks.CheckChangeHasDescription(
1595 input_api, output_api))
1596 results.extend(_CheckSubversionConfig(input_api, output_api))
1597 return results
1600 def GetPreferredTryMasters(project, change):
1601 files = change.LocalPaths()
1603 if not files or all(re.search(r'[\\/]OWNERS$', f) for f in files):
1604 return {}
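# Illustrative example (hypothetical paths): a change touching only
# chrome/browser/ui/cocoa/foo.mm matches the Mac pattern below and gets only
# the Mac try configs; a change that also touches, say, base/foo.cc matches
# none of the platform-specific branches and falls through to the full
# |builders| list further down.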
1606 if all(re.search('\.(m|mm)$|(^|[/_])mac[/_.]', f) for f in files):
1607 return GetDefaultTryConfigs([
1608 'mac_chromium_compile_dbg',
1609 'mac_chromium_rel',
1610 ])
1611 if all(re.search('(^|[/_])win[/_.]', f) for f in files):
1612 return GetDefaultTryConfigs(['win_chromium_dbg', 'win_chromium_rel'])
1613 if all(re.search('(^|[/_])android[/_.]', f) for f in files):
1614 return GetDefaultTryConfigs([
1615 'android_aosp',
1616 'android_clang_dbg',
1617 'android_dbg',
1618 ])
1619 if all(re.search('[/_]ios[/_.]', f) for f in files):
1620 return GetDefaultTryConfigs(['ios_rel_device', 'ios_dbg_simulator'])
1622 builders = [
1623 'android_chromium_gn_compile_rel',
1624 'android_clang_dbg',
1625 'android_dbg',
1626 'ios_dbg_simulator',
1627 'ios_rel_device',
1628 'linux_chromium_chromeos_rel',
1629 'linux_chromium_clang_dbg',
1630 'linux_chromium_gn_rel',
1631 'linux_chromium_rel',
1632 'linux_gpu',
1633 'mac_chromium_compile_dbg',
1634 'mac_chromium_rel',
1635 'mac_gpu',
1636 'win_chromium_compile_dbg',
1637 'win_chromium_rel',
1638 'win_chromium_x64_rel',
1639 'win_gpu',
1640 ]
1642 # Match things like path/aura/file.cc and path/file_aura.cc.
1643 # Same for chromeos.
1644 if any(re.search('[/_](aura|chromeos)', f) for f in files):
1645 builders.extend([
1646 'linux_chromeos_asan',
1647 'linux_chromium_chromeos_clang_dbg'
1648 ])
1650 # If there are gyp changes to base, build, or chromeos, run a full cros build
1651 # in addition to the shorter linux_chromeos build. Changes to high level gyp
1652 # files have a much higher chance of breaking the cros build, which is
1653 # differnt from the linux_chromeos build that most chrome developers test
1654 # with.
1655 if any(re.search('^(base|build|chromeos).*\.gypi?$', f) for f in files):
1656 builders.extend(['cros_x86'])
1658 # The AOSP bot doesn't build the chrome/ layer, so ignore any changes to it
1659 # unless they're .gyp(i) files as changes to those files can break the gyp
1660 # step on that bot.
1661 if (not all(re.search('^chrome', f) for f in files) or
1662 any(re.search('\.gypi?$', f) for f in files)):
1663 builders.extend(['android_aosp'])
1665 return GetDefaultTryConfigs(builders)