1 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 """Top-level presubmit script for Chromium.
7 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
8 for more details about the presubmit API built into depot_tools.
9 """
12 _EXCLUDED_PATHS = (
13 r"^breakpad[\\\/].*",
14 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
15 r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
16 r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
17 r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
18 r"^skia[\\\/].*",
19 r"^v8[\\\/].*",
20 r".*MakeFile$",
21 r".+_autogen\.h$",
22 r".+[\\\/]pnacl_shim\.c$",
23 r"^gpu[\\\/]config[\\\/].*_list_json\.cc$",
24 r"^chrome[\\\/]browser[\\\/]resources[\\\/]pdf[\\\/]index.js"
27 # The NetscapePlugIn library is excluded from pan-project as it will soon
28 # be deleted together with the rest of the NPAPI and it's not worthwhile to
29 # update the coding style until then.
30 _TESTRUNNER_PATHS = (
31 r"^content[\\\/]shell[\\\/]tools[\\\/]plugin[\\\/].*",
34 # Fragment of a regular expression that matches C++ and Objective-C++
35 # implementation files.
36 _IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
38 # Regular expression that matches code only used for test binaries
39 # (best effort).
40 _TEST_CODE_EXCLUDED_PATHS = (
41 r'.*[\\\/](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
42 r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
43 r'.+_(api|browser|kif|perf|pixel|unit|ui)?test(_[a-z]+)?%s' %
44 _IMPLEMENTATION_EXTENSIONS,
45 r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
46 r'.*[\\\/](test|tool(s)?)[\\\/].*',
47 # content_shell is used for running layout tests.
48 r'content[\\\/]shell[\\\/].*',
49 # At request of folks maintaining this folder.
50 r'chrome[\\\/]browser[\\\/]automation[\\\/].*',
51 # Non-production example code.
52 r'mojo[\\\/]examples[\\\/].*',
53 # Launcher for running iOS tests on the simulator.
54 r'testing[\\\/]iossim[\\\/]iossim\.mm$',
57 _TEST_ONLY_WARNING = (
58 'You might be calling functions intended only for testing from\n'
59 'production code. It is OK to ignore this warning if you know what\n'
60 'you are doing, as the heuristics used to detect the situation are\n'
61 'not perfect. The commit queue will not block on this warning.')
64 _INCLUDE_ORDER_WARNING = (
65 'Your #include order seems to be broken. Remember to use the right '
66 'collation (LC_COLLATE=C) and check https://google-styleguide.googlecode'
67 '.com/svn/trunk/cppguide.html#Names_and_Order_of_Includes')
69 _BANNED_OBJC_FUNCTIONS = (
71 'addTrackingRect:',
73 'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
74 ' prohibited. Please use CrTrackingArea instead.',
75 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
77 False,
80 r'/NSTrackingArea\W',
82 'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
83 'instead.',
84 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
86 False,
89 'convertPointFromBase:',
91 'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
92 'Please use |convertPoint:(point) fromView:nil| instead.',
93 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
95 True,
98 'convertPointToBase:',
100 'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
101 'Please use |convertPoint:(point) toView:nil| instead.',
102 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
104 True,
107 'convertRectFromBase:',
109 'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
110 'Please use |convertRect:(point) fromView:nil| instead.',
111 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
113 True,
116 'convertRectToBase:',
118 'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
119 'Please use |convertRect:(point) toView:nil| instead.',
120 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
122 True,
125 'convertSizeFromBase:',
127 'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
128 'Please use |convertSize:(point) fromView:nil| instead.',
129 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
131 True,
134 'convertSizeToBase:',
136 'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
137 'Please use |convertSize:(point) toView:nil| instead.',
138 'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
140 True,
145 _BANNED_CPP_FUNCTIONS = (
146 # Make sure that gtest's FRIEND_TEST() macro is not used; the
147 # FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
148 # used instead since that allows for FLAKY_ and DISABLED_ prefixes.
150 'FRIEND_TEST(',
152 'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
153 'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
155 False,
159 'ScopedAllowIO',
161 'New code should not use ScopedAllowIO. Post a task to the blocking',
162 'pool or the FILE thread instead.',
164 True,
166 r"^base[\\\/]process[\\\/]process_metrics_linux\.cc$",
167 r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]boot_times_recorder\.cc$",
168 r"^chrome[\\\/]browser[\\\/]chromeos[\\\/]"
169 "customization_document_browsertest\.cc$",
170 r"^components[\\\/]crash[\\\/]app[\\\/]breakpad_mac\.mm$",
171 r"^content[\\\/]shell[\\\/]browser[\\\/]shell_browser_main\.cc$",
172 r"^content[\\\/]shell[\\\/]browser[\\\/]shell_message_filter\.cc$",
173 r"^mojo[\\\/]edk[\\\/]embedder[\\\/]" +
174 r"simple_platform_shared_buffer_posix\.cc$",
175 r"^net[\\\/]disk_cache[\\\/]cache_util\.cc$",
176 r"^net[\\\/]url_request[\\\/]test_url_fetcher_factory\.cc$",
177 r"^ui[\\\/]ozone[\\\/]platform[\\\/]drm[\\\/]host[\\\/]"
178 "drm_native_display_delegate\.cc$",
182 'SkRefPtr',
184 'The use of SkRefPtr is prohibited. ',
185 'Please use skia::RefPtr instead.'
187 True,
191 'SkAutoRef',
193 'The indirect use of SkRefPtr via SkAutoRef is prohibited. ',
194 'Please use skia::RefPtr instead.'
196 True,
200 'SkAutoTUnref',
202 'The use of SkAutoTUnref is dangerous because it implicitly ',
203 'converts to a raw pointer. Please use skia::RefPtr instead.'
205 True,
209 'SkAutoUnref',
211 'The indirect use of SkAutoTUnref through SkAutoUnref is dangerous ',
212 'because it implicitly converts to a raw pointer. ',
213 'Please use skia::RefPtr instead.'
215 True,
219 r'/HANDLE_EINTR\(.*close',
221 'HANDLE_EINTR(close) is invalid. If close fails with EINTR, the file',
222 'descriptor will be closed, and it is incorrect to retry the close.',
223 'Either call close directly and ignore its return value, or wrap close',
224 'in IGNORE_EINTR to use its return value. See http://crbug.com/269623'
226 True,
230 r'/IGNORE_EINTR\((?!.*close)',
232 'IGNORE_EINTR is only valid when wrapping close. To wrap other system',
233 'calls, use HANDLE_EINTR. See http://crbug.com/269623',
235 True,
237 # Files that #define IGNORE_EINTR.
238 r'^base[\\\/]posix[\\\/]eintr_wrapper\.h$',
239 r'^ppapi[\\\/]tests[\\\/]test_broker\.cc$',
243 r'/v8::Extension\(',
245 'Do not introduce new v8::Extensions into the code base, use',
246 'gin::Wrappable instead. See http://crbug.com/334679',
248 True,
250 r'extensions[\\\/]renderer[\\\/]safe_builtins\.*',
255 _IPC_ENUM_TRAITS_DEPRECATED = (
256 'You are using IPC_ENUM_TRAITS() in your code. It has been deprecated.\n'
257 'See http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc')
260 _VALID_OS_MACROS = (
261 # Please keep sorted.
262 'OS_ANDROID',
263 'OS_ANDROID_HOST',
264 'OS_BSD',
265 'OS_CAT', # For testing.
266 'OS_CHROMEOS',
267 'OS_FREEBSD',
268 'OS_IOS',
269 'OS_LINUX',
270 'OS_MACOSX',
271 'OS_NACL',
272 'OS_NACL_NONSFI',
273 'OS_NACL_SFI',
274 'OS_OPENBSD',
275 'OS_POSIX',
276 'OS_QNX',
277 'OS_SOLARIS',
278 'OS_WIN',
282 def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
283 """Attempts to prevent use of functions intended only for testing in
284 non-testing code. For now this is just a best-effort implementation
285 that ignores header files and may have some false positives. A
286 better implementation would probably need a proper C++ parser.
288 # We only scan .cc files and the like, as the declaration of
289 # for-testing functions in header files are hard to distinguish from
290 # calls to such functions without a proper C++ parser.
291 file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
293 base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
294 inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
295 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
296 exclusion_pattern = input_api.re.compile(
297 r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
298 base_function_pattern, base_function_pattern))
300 def FilterFile(affected_file):
301 black_list = (_EXCLUDED_PATHS +
302 _TEST_CODE_EXCLUDED_PATHS +
303 input_api.DEFAULT_BLACK_LIST)
304 return input_api.FilterSourceFile(
305 affected_file,
306 white_list=(file_inclusion_pattern, ),
307 black_list=black_list)
309 problems = []
310 for f in input_api.AffectedSourceFiles(FilterFile):
311 local_path = f.LocalPath()
312 for line_number, line in f.ChangedContents():
313 if (inclusion_pattern.search(line) and
314 not comment_pattern.search(line) and
315 not exclusion_pattern.search(line)):
316 problems.append(
317 '%s:%d\n %s' % (local_path, line_number, line.strip()))
319 if problems:
320 return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
321 else:
322 return []
325 def _CheckNoIOStreamInHeaders(input_api, output_api):
326 """Checks to make sure no .h files include <iostream>."""
327 files = []
328 pattern = input_api.re.compile(r'^#include\s*<iostream>',
329 input_api.re.MULTILINE)
330 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
331 if not f.LocalPath().endswith('.h'):
332 continue
333 contents = input_api.ReadFile(f)
334 if pattern.search(contents):
335 files.append(f)
337 if files:
338 return [ output_api.PresubmitError(
339 'Do not #include <iostream> in header files, since it inserts static '
340 'initialization into every file including the header. Instead, '
341 '#include <ostream>. See http://crbug.com/94794',
342 files) ]
343 return []
346 def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
347 """Checks to make sure no source files use UNIT_TEST"""
348 problems = []
349 for f in input_api.AffectedFiles():
350 if (not f.LocalPath().endswith(('.cc', '.mm'))):
351 continue
353 for line_num, line in f.ChangedContents():
354 if 'UNIT_TEST ' in line or line.endswith('UNIT_TEST'):
355 problems.append(' %s:%d' % (f.LocalPath(), line_num))
357 if not problems:
358 return []
359 return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
360 '\n'.join(problems))]
363 def _FindHistogramNameInLine(histogram_name, line):
364 """Tries to find a histogram name or prefix in a line."""
365 if "affected-histogram" not in line:
366 return histogram_name in line
367 # A histogram_suffixes tag type has an affected-histogram name as a prefix of
368 # the histogram_name.
369 if '"' not in line:
370 return False
371 histogram_prefix = line.split('\"')[1]
372 return histogram_prefix in histogram_name
375 def _CheckUmaHistogramChanges(input_api, output_api):
376 """Check that UMA histogram names in touched lines can still be found in other
377 lines of the patch or in histograms.xml. Note that this check would not catch
378 the reverse: changes in histograms.xml not matched in the code itself."""
379 touched_histograms = []
380 histograms_xml_modifications = []
381 pattern = input_api.re.compile('UMA_HISTOGRAM.*\("(.*)"')
382 for f in input_api.AffectedFiles():
383 # If histograms.xml itself is modified, keep the modified lines for later.
384 if f.LocalPath().endswith(('histograms.xml')):
385 histograms_xml_modifications = f.ChangedContents()
386 continue
387 if not f.LocalPath().endswith(('cc', 'mm', 'cpp')):
388 continue
389 for line_num, line in f.ChangedContents():
390 found = pattern.search(line)
391 if found:
392 touched_histograms.append([found.group(1), f, line_num])
394 # Search for the touched histogram names in the local modifications to
395 # histograms.xml, and, if not found, on the base histograms.xml file.
396 unmatched_histograms = []
397 for histogram_info in touched_histograms:
398 histogram_name_found = False
399 for line_num, line in histograms_xml_modifications:
400 histogram_name_found = _FindHistogramNameInLine(histogram_info[0], line)
401 if histogram_name_found:
402 break
403 if not histogram_name_found:
404 unmatched_histograms.append(histogram_info)
406 histograms_xml_path = 'tools/metrics/histograms/histograms.xml'
407 problems = []
408 if unmatched_histograms:
409 with open(histograms_xml_path) as histograms_xml:
410 for histogram_name, f, line_num in unmatched_histograms:
411 histograms_xml.seek(0)
412 histogram_name_found = False
413 for line in histograms_xml:
414 histogram_name_found = _FindHistogramNameInLine(histogram_name, line)
415 if histogram_name_found:
416 break
417 if not histogram_name_found:
418 problems.append(' [%s:%d] %s' %
419 (f.LocalPath(), line_num, histogram_name))
421 if not problems:
422 return []
423 return [output_api.PresubmitPromptWarning('Some UMA_HISTOGRAM lines have '
424 'been modified and the associated histogram name has no match in either '
425 '%s or the modifications of it:' % (histograms_xml_path), problems)]
428 def _CheckNoNewWStrings(input_api, output_api):
429 """Checks to make sure we don't introduce use of wstrings."""
430 problems = []
431 for f in input_api.AffectedFiles():
432 if (not f.LocalPath().endswith(('.cc', '.h')) or
433 f.LocalPath().endswith(('test.cc', '_win.cc', '_win.h')) or
434 '/win/' in f.LocalPath()):
435 continue
437 allowWString = False
438 for line_num, line in f.ChangedContents():
439 if 'presubmit: allow wstring' in line:
440 allowWString = True
441 elif not allowWString and 'wstring' in line:
442 problems.append(' %s:%d' % (f.LocalPath(), line_num))
443 allowWString = False
444 else:
445 allowWString = False
447 if not problems:
448 return []
449 return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
450 ' If you are calling a cross-platform API that accepts a wstring, '
451 'fix the API.\n' +
452 '\n'.join(problems))]
455 def _CheckNoDEPSGIT(input_api, output_api):
456 """Make sure .DEPS.git is never modified manually."""
457 if any(f.LocalPath().endswith('.DEPS.git') for f in
458 input_api.AffectedFiles()):
459 return [output_api.PresubmitError(
460 'Never commit changes to .DEPS.git. This file is maintained by an\n'
461 'automated system based on what\'s in DEPS and your changes will be\n'
462 'overwritten.\n'
463 'See https://sites.google.com/a/chromium.org/dev/developers/how-tos/get-the-code#Rolling_DEPS\n'
464 'for more information')]
465 return []
468 def _CheckValidHostsInDEPS(input_api, output_api):
469 """Checks that DEPS file deps are from allowed_hosts."""
470 # Run only if DEPS file has been modified to annoy fewer bystanders.
471 if all(f.LocalPath() != 'DEPS' for f in input_api.AffectedFiles()):
472 return []
473 # Outsource work to gclient verify
474 try:
475 input_api.subprocess.check_output(['gclient', 'verify'])
476 return []
477 except input_api.subprocess.CalledProcessError, error:
478 return [output_api.PresubmitError(
479 'DEPS file must have only git dependencies.',
480 long_text=error.output)]
483 def _CheckNoBannedFunctions(input_api, output_api):
484 """Make sure that banned functions are not used."""
485 warnings = []
486 errors = []
488 file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
489 for f in input_api.AffectedFiles(file_filter=file_filter):
490 for line_num, line in f.ChangedContents():
491 for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
492 matched = False
493 if func_name[0:1] == '/':
494 regex = func_name[1:]
495 if input_api.re.search(regex, line):
496 matched = True
497 elif func_name in line:
498 matched = True
499 if matched:
500 problems = warnings
501 if error:
502 problems = errors
503 problems.append(' %s:%d:' % (f.LocalPath(), line_num))
504 for message_line in message:
505 problems.append(' %s' % message_line)
507 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
508 for f in input_api.AffectedFiles(file_filter=file_filter):
509 for line_num, line in f.ChangedContents():
510 for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
511 def IsBlacklisted(affected_file, blacklist):
512 local_path = affected_file.LocalPath()
513 for item in blacklist:
514 if input_api.re.match(item, local_path):
515 return True
516 return False
517 if IsBlacklisted(f, excluded_paths):
518 continue
519 matched = False
520 if func_name[0:1] == '/':
521 regex = func_name[1:]
522 if input_api.re.search(regex, line):
523 matched = True
524 elif func_name in line:
525 matched = True
526 if matched:
527 problems = warnings
528 if error:
529 problems = errors
530 problems.append(' %s:%d:' % (f.LocalPath(), line_num))
531 for message_line in message:
532 problems.append(' %s' % message_line)
534 result = []
535 if (warnings):
536 result.append(output_api.PresubmitPromptWarning(
537 'Banned functions were used.\n' + '\n'.join(warnings)))
538 if (errors):
539 result.append(output_api.PresubmitError(
540 'Banned functions were used.\n' + '\n'.join(errors)))
541 return result
544 def _CheckNoPragmaOnce(input_api, output_api):
545 """Make sure that banned functions are not used."""
546 files = []
547 pattern = input_api.re.compile(r'^#pragma\s+once',
548 input_api.re.MULTILINE)
549 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
550 if not f.LocalPath().endswith('.h'):
551 continue
552 contents = input_api.ReadFile(f)
553 if pattern.search(contents):
554 files.append(f)
556 if files:
557 return [output_api.PresubmitError(
558 'Do not use #pragma once in header files.\n'
559 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
560 files)]
561 return []
564 def _CheckNoTrinaryTrueFalse(input_api, output_api):
565 """Checks to make sure we don't introduce use of foo ? true : false."""
566 problems = []
567 pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
568 for f in input_api.AffectedFiles():
569 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
570 continue
572 for line_num, line in f.ChangedContents():
573 if pattern.search(line):
574 problems.append(' %s:%d' % (f.LocalPath(), line_num))
576 if not problems:
577 return []
578 return [output_api.PresubmitPromptWarning(
579 'Please consider avoiding the "? true : false" pattern if possible.\n' +
580 '\n'.join(problems))]
583 def _CheckUnwantedDependencies(input_api, output_api):
584 """Runs checkdeps on #include statements added in this
585 change. Breaking "-" rules is an error; breaking "!" rules is a
586 warning.
588 import sys
589 # We need to wait until we have an input_api object and use this
590 # roundabout construct to import checkdeps because this file is
591 # eval-ed and thus doesn't have __file__.
592 original_sys_path = sys.path
593 try:
594 sys.path = sys.path + [input_api.os_path.join(
595 input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
596 import checkdeps
597 from cpp_checker import CppChecker
598 from rules import Rule
599 finally:
600 # Restore sys.path to what it was before.
601 sys.path = original_sys_path
603 added_includes = []
604 for f in input_api.AffectedFiles():
605 if not CppChecker.IsCppFile(f.LocalPath()):
606 continue
608 changed_lines = [line for line_num, line in f.ChangedContents()]
609 added_includes.append([f.LocalPath(), changed_lines])
611 deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
613 error_descriptions = []
614 warning_descriptions = []
615 for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
616 added_includes):
617 description_with_path = '%s\n %s' % (path, rule_description)
618 if rule_type == Rule.DISALLOW:
619 error_descriptions.append(description_with_path)
620 else:
621 warning_descriptions.append(description_with_path)
623 results = []
624 if error_descriptions:
625 results.append(output_api.PresubmitError(
626 'You added one or more #includes that violate checkdeps rules.',
627 error_descriptions))
628 if warning_descriptions:
629 results.append(output_api.PresubmitPromptOrNotify(
630 'You added one or more #includes of files that are temporarily\n'
631 'allowed but being removed. Can you avoid introducing the\n'
632 '#include? See relevant DEPS file(s) for details and contacts.',
633 warning_descriptions))
634 return results
637 def _CheckFilePermissions(input_api, output_api):
638 """Check that all files have their permissions properly set."""
639 if input_api.platform == 'win32':
640 return []
641 args = [input_api.python_executable, 'tools/checkperms/checkperms.py',
642 '--root', input_api.change.RepositoryRoot()]
643 for f in input_api.AffectedFiles():
644 args += ['--file', f.LocalPath()]
645 checkperms = input_api.subprocess.Popen(args,
646 stdout=input_api.subprocess.PIPE)
647 errors = checkperms.communicate()[0].strip()
648 if errors:
649 return [output_api.PresubmitError('checkperms.py failed.',
650 errors.splitlines())]
651 return []
654 def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
655 """Makes sure we don't include ui/aura/window_property.h
656 in header files.
658 pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
659 errors = []
660 for f in input_api.AffectedFiles():
661 if not f.LocalPath().endswith('.h'):
662 continue
663 for line_num, line in f.ChangedContents():
664 if pattern.match(line):
665 errors.append(' %s:%d' % (f.LocalPath(), line_num))
667 results = []
668 if errors:
669 results.append(output_api.PresubmitError(
670 'Header files should not include ui/aura/window_property.h', errors))
671 return results
674 def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
675 """Checks that the lines in scope occur in the right order.
677 1. C system files in alphabetical order
678 2. C++ system files in alphabetical order
679 3. Project's .h files
682 c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
683 cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
684 custom_include_pattern = input_api.re.compile(r'\s*#include ".*')
686 C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)
688 state = C_SYSTEM_INCLUDES
690 previous_line = ''
691 previous_line_num = 0
692 problem_linenums = []
693 for line_num, line in scope:
694 if c_system_include_pattern.match(line):
695 if state != C_SYSTEM_INCLUDES:
696 problem_linenums.append((line_num, previous_line_num))
697 elif previous_line and previous_line > line:
698 problem_linenums.append((line_num, previous_line_num))
699 elif cpp_system_include_pattern.match(line):
700 if state == C_SYSTEM_INCLUDES:
701 state = CPP_SYSTEM_INCLUDES
702 elif state == CUSTOM_INCLUDES:
703 problem_linenums.append((line_num, previous_line_num))
704 elif previous_line and previous_line > line:
705 problem_linenums.append((line_num, previous_line_num))
706 elif custom_include_pattern.match(line):
707 if state != CUSTOM_INCLUDES:
708 state = CUSTOM_INCLUDES
709 elif previous_line and previous_line > line:
710 problem_linenums.append((line_num, previous_line_num))
711 else:
712 problem_linenums.append((line_num, previous_line_num))
713 previous_line = line
714 previous_line_num = line_num
716 warnings = []
717 for (line_num, previous_line_num) in problem_linenums:
718 if line_num in changed_linenums or previous_line_num in changed_linenums:
719 warnings.append(' %s:%d' % (file_path, line_num))
720 return warnings
723 def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
724 """Checks the #include order for the given file f."""
726 system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
727 # Exclude the following includes from the check:
728 # 1) #include <.../...>, e.g., <sys/...> includes often need to appear in a
729 # specific order.
730 # 2) <atlbase.h>, "build/build_config.h"
731 excluded_include_pattern = input_api.re.compile(
732 r'\s*#include (\<.*/.*|\<atlbase\.h\>|"build/build_config.h")')
733 custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
734 # Match the final or penultimate token if it is xxxtest so we can ignore it
735 # when considering the special first include.
736 test_file_tag_pattern = input_api.re.compile(
737 r'_[a-z]+test(?=(_[a-zA-Z0-9]+)?\.)')
738 if_pattern = input_api.re.compile(
739 r'\s*#\s*(if|elif|else|endif|define|undef).*')
740 # Some files need specialized order of includes; exclude such files from this
741 # check.
742 uncheckable_includes_pattern = input_api.re.compile(
743 r'\s*#include '
744 '("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')
746 contents = f.NewContents()
747 warnings = []
748 line_num = 0
750 # Handle the special first include. If the first include file is
751 # some/path/file.h, the corresponding including file can be some/path/file.cc,
752 # some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
753 # etc. It's also possible that no special first include exists.
754 # If the included file is some/path/file_platform.h the including file could
755 # also be some/path/file_xxxtest_platform.h.
756 including_file_base_name = test_file_tag_pattern.sub(
757 '', input_api.os_path.basename(f.LocalPath()))
759 for line in contents:
760 line_num += 1
761 if system_include_pattern.match(line):
762 # No special first include -> process the line again along with normal
763 # includes.
764 line_num -= 1
765 break
766 match = custom_include_pattern.match(line)
767 if match:
768 match_dict = match.groupdict()
769 header_basename = test_file_tag_pattern.sub(
770 '', input_api.os_path.basename(match_dict['FILE'])).replace('.h', '')
772 if header_basename not in including_file_base_name:
773 # No special first include -> process the line again along with normal
774 # includes.
775 line_num -= 1
776 break
778 # Split into scopes: Each region between #if and #endif is its own scope.
779 scopes = []
780 current_scope = []
781 for line in contents[line_num:]:
782 line_num += 1
783 if uncheckable_includes_pattern.match(line):
784 continue
785 if if_pattern.match(line):
786 scopes.append(current_scope)
787 current_scope = []
788 elif ((system_include_pattern.match(line) or
789 custom_include_pattern.match(line)) and
790 not excluded_include_pattern.match(line)):
791 current_scope.append((line_num, line))
792 scopes.append(current_scope)
794 for scope in scopes:
795 warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
796 changed_linenums))
797 return warnings
800 def _CheckIncludeOrder(input_api, output_api):
801 """Checks that the #include order is correct.
803 1. The corresponding header for source files.
804 2. C system files in alphabetical order
805 3. C++ system files in alphabetical order
806 4. Project's .h files in alphabetical order
808 Each region separated by #if, #elif, #else, #endif, #define and #undef follows
809 these rules separately.
811 def FileFilterIncludeOrder(affected_file):
812 black_list = (_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
813 return input_api.FilterSourceFile(affected_file, black_list=black_list)
815 warnings = []
816 for f in input_api.AffectedFiles(file_filter=FileFilterIncludeOrder):
817 if f.LocalPath().endswith(('.cc', '.h')):
818 changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
819 warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))
821 results = []
822 if warnings:
823 results.append(output_api.PresubmitPromptOrNotify(_INCLUDE_ORDER_WARNING,
824 warnings))
825 return results
828 def _CheckForVersionControlConflictsInFile(input_api, f):
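"""Returns findings for changed lines in |f| that contain version control conflict markers."""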
829 pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
830 errors = []
831 for line_num, line in f.ChangedContents():
832 if pattern.match(line):
833 errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
834 return errors
837 def _CheckForVersionControlConflicts(input_api, output_api):
838 """Usually this is not intentional and will cause a compile failure."""
839 errors = []
840 for f in input_api.AffectedFiles():
841 errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))
843 results = []
844 if errors:
845 results.append(output_api.PresubmitError(
846 'Version control conflict markers found, please resolve.', errors))
847 return results
850 def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
851 def FilterFile(affected_file):
852 """Filter function for use with input_api.AffectedSourceFiles,
853 below. This filters out everything except non-test files from
854 top-level directories that generally speaking should not hard-code
855 service URLs (e.g. src/android_webview/, src/content/ and others).
857 return input_api.FilterSourceFile(
858 affected_file,
859 white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
860 black_list=(_EXCLUDED_PATHS +
861 _TEST_CODE_EXCLUDED_PATHS +
862 input_api.DEFAULT_BLACK_LIST))
864 base_pattern = '"[^"]*google\.com[^"]*"'
865 comment_pattern = input_api.re.compile('//.*%s' % base_pattern)
866 pattern = input_api.re.compile(base_pattern)
867 problems = [] # items are (filename, line_number, line)
868 for f in input_api.AffectedSourceFiles(FilterFile):
869 for line_num, line in f.ChangedContents():
870 if not comment_pattern.search(line) and pattern.search(line):
871 problems.append((f.LocalPath(), line_num, line))
873 if problems:
874 return [output_api.PresubmitPromptOrNotify(
875 'Most layers below src/chrome/ should not hardcode service URLs.\n'
876 'Are you sure this is correct?',
877 [' %s:%d: %s' % (
878 problem[0], problem[1], problem[2]) for problem in problems])]
879 else:
880 return []
883 def _CheckNoAbbreviationInPngFileName(input_api, output_api):
884 """Makes sure there are no abbreviations in the name of PNG files.
885 The native_client_sdk directory is excluded because it has auto-generated PNG
886 files for documentation.
888 errors = []
889 white_list = (r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$',)
890 black_list = (r'^native_client_sdk[\\\/]',)
891 file_filter = lambda f: input_api.FilterSourceFile(
892 f, white_list=white_list, black_list=black_list)
893 for f in input_api.AffectedFiles(include_deletes=False,
894 file_filter=file_filter):
895 errors.append(' %s' % f.LocalPath())
897 results = []
898 if errors:
899 results.append(output_api.PresubmitError(
900 'The name of PNG files should not have abbreviations. \n'
901 'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
902 'Contact oshima@chromium.org if you have questions.', errors))
903 return results
906 def _FilesToCheckForIncomingDeps(re, changed_lines):
907 """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns
908 a set of DEPS entries that we should look up.
910 For a directory (rather than a specific filename) we fake a path to
911 a specific filename by adding /DEPS. This is chosen as a file that
912 will seldom or never be subject to per-file include_rules.
914 # We ignore deps entries on auto-generated directories.
915 AUTO_GENERATED_DIRS = ['grit', 'jni']
917 # This pattern grabs the path without basename in the first
918 # parentheses, and the basename (if present) in the second. It
919 # relies on the simple heuristic that if there is a basename it will
920 # be a header file ending in ".h".
921 pattern = re.compile(
922 r"""['"]\+([^'"]+?)(/[a-zA-Z0-9_]+\.h)?['"].*""")
923 results = set()
924 for changed_line in changed_lines:
925 m = pattern.match(changed_line)
926 if m:
927 path = m.group(1)
928 if path.split('/')[0] not in AUTO_GENERATED_DIRS:
929 if m.group(2):
930 results.add('%s%s' % (path, m.group(2)))
931 else:
932 results.add('%s/DEPS' % path)
933 return results
936 def _CheckAddedDepsHaveTargetApprovals(input_api, output_api):
937 """When a dependency prefixed with + is added to a DEPS file, we
938 want to make sure that the change is reviewed by an OWNER of the
939 target file or directory, to avoid layering violations from being
940 introduced. This check verifies that this happens.
942 changed_lines = set()
943 for f in input_api.AffectedFiles():
944 filename = input_api.os_path.basename(f.LocalPath())
945 if filename == 'DEPS':
946 changed_lines |= set(line.strip()
947 for line_num, line
948 in f.ChangedContents())
949 if not changed_lines:
950 return []
952 virtual_depended_on_files = _FilesToCheckForIncomingDeps(input_api.re,
953 changed_lines)
954 if not virtual_depended_on_files:
955 return []
957 if input_api.is_committing:
958 if input_api.tbr:
959 return [output_api.PresubmitNotifyResult(
960 '--tbr was specified, skipping OWNERS check for DEPS additions')]
961 if not input_api.change.issue:
962 return [output_api.PresubmitError(
963 "DEPS approval by OWNERS check failed: this change has "
964 "no Rietveld issue number, so we can't check it for approvals.")]
965 output = output_api.PresubmitError
966 else:
967 output = output_api.PresubmitNotifyResult
969 owners_db = input_api.owners_db
970 owner_email, reviewers = input_api.canned_checks._RietveldOwnerAndReviewers(
971 input_api,
972 owners_db.email_regexp,
973 approval_needed=input_api.is_committing)
975 owner_email = owner_email or input_api.change.author_email
977 reviewers_plus_owner = set(reviewers)
978 if owner_email:
979 reviewers_plus_owner.add(owner_email)
980 missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
981 reviewers_plus_owner)
983 # We strip the /DEPS part that was added by
984 # _FilesToCheckForIncomingDeps to fake a path to a file in a
985 # directory.
986 def StripDeps(path):
987 start_deps = path.rfind('/DEPS')
988 if start_deps != -1:
989 return path[:start_deps]
990 else:
991 return path
992 unapproved_dependencies = ["'+%s'," % StripDeps(path)
993 for path in missing_files]
995 if unapproved_dependencies:
996 output_list = [
997 output('Missing LGTM from OWNERS of dependencies added to DEPS:\n %s' %
998 '\n '.join(sorted(unapproved_dependencies)))]
999 if not input_api.is_committing:
1000 suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
1001 output_list.append(output(
1002 'Suggested missing target path OWNERS:\n %s' %
1003 '\n '.join(suggested_owners or [])))
1004 return output_list
1006 return []
1009 def _CheckSpamLogging(input_api, output_api):
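"""Checks that changed files do not spam the console via LOG(INFO), LOG_IF(INFO, ...), printf, or fprintf(stdout/stderr), outside the paths excluded below."""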
1010 file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
1011 black_list = (_EXCLUDED_PATHS +
1012 _TEST_CODE_EXCLUDED_PATHS +
1013 input_api.DEFAULT_BLACK_LIST +
1014 (r"^base[\\\/]logging\.h$",
1015 r"^base[\\\/]logging\.cc$",
1016 r"^chrome[\\\/]app[\\\/]chrome_main_delegate\.cc$",
1017 r"^chrome[\\\/]browser[\\\/]chrome_browser_main\.cc$",
1018 r"^chrome[\\\/]browser[\\\/]ui[\\\/]startup[\\\/]"
1019 r"startup_browser_creator\.cc$",
1020 r"^chrome[\\\/]installer[\\\/]setup[\\\/].*",
1021 r"chrome[\\\/]browser[\\\/]diagnostics[\\\/]" +
1022 r"diagnostics_writer\.cc$",
1023 r"^chrome_elf[\\\/]dll_hash[\\\/]dll_hash_main\.cc$",
1024 r"^chromecast[\\\/]",
1025 r"^cloud_print[\\\/]",
1026 r"^content[\\\/]common[\\\/]gpu[\\\/]client[\\\/]"
1027 r"gl_helper_benchmark\.cc$",
1028 r"^courgette[\\\/]courgette_tool\.cc$",
1029 r"^extensions[\\\/]renderer[\\\/]logging_native_handler\.cc$",
1030 r"^ipc[\\\/]ipc_logging\.cc$",
1031 r"^native_client_sdk[\\\/]",
1032 r"^remoting[\\\/]base[\\\/]logging\.h$",
1033 r"^remoting[\\\/]host[\\\/].*",
1034 r"^sandbox[\\\/]linux[\\\/].*",
1035 r"^tools[\\\/]",
1036 r"^ui[\\\/]aura[\\\/]bench[\\\/]bench_main\.cc$",
1037 r"^storage[\\\/]browser[\\\/]fileapi[\\\/]" +
1038 r"dump_file_system.cc$",))
1039 source_file_filter = lambda x: input_api.FilterSourceFile(
1040 x, white_list=(file_inclusion_pattern,), black_list=black_list)
1042 log_info = []
1043 printf = []
1045 for f in input_api.AffectedSourceFiles(source_file_filter):
1046 contents = input_api.ReadFile(f, 'rb')
1047 if input_api.re.search(r"\bD?LOG\s*\(\s*INFO\s*\)", contents):
1048 log_info.append(f.LocalPath())
1049 elif input_api.re.search(r"\bD?LOG_IF\s*\(\s*INFO\s*,", contents):
1050 log_info.append(f.LocalPath())
1052 if input_api.re.search(r"\bprintf\(", contents):
1053 printf.append(f.LocalPath())
1054 elif input_api.re.search(r"\bfprintf\((stdout|stderr)", contents):
1055 printf.append(f.LocalPath())
1057 if log_info:
1058 return [output_api.PresubmitError(
1059 'These files spam the console log with LOG(INFO):',
1060 items=log_info)]
1061 if printf:
1062 return [output_api.PresubmitError(
1063 'These files spam the console log with printf/fprintf:',
1064 items=printf)]
1065 return []
1068 def _CheckForAnonymousVariables(input_api, output_api):
1069 """These types are all expected to hold locks while in scope and
1070 so should never be anonymous (which causes them to be immediately
1071 destroyed)."""
1072 they_who_must_be_named = [
1073 'base::AutoLock',
1074 'base::AutoReset',
1075 'base::AutoUnlock',
1076 'SkAutoAlphaRestore',
1077 'SkAutoBitmapShaderInstall',
1078 'SkAutoBlitterChoose',
1079 'SkAutoBounderCommit',
1080 'SkAutoCallProc',
1081 'SkAutoCanvasRestore',
1082 'SkAutoCommentBlock',
1083 'SkAutoDescriptor',
1084 'SkAutoDisableDirectionCheck',
1085 'SkAutoDisableOvalCheck',
1086 'SkAutoFree',
1087 'SkAutoGlyphCache',
1088 'SkAutoHDC',
1089 'SkAutoLockColors',
1090 'SkAutoLockPixels',
1091 'SkAutoMalloc',
1092 'SkAutoMaskFreeImage',
1093 'SkAutoMutexAcquire',
1094 'SkAutoPathBoundsUpdate',
1095 'SkAutoPDFRelease',
1096 'SkAutoRasterClipValidate',
1097 'SkAutoRef',
1098 'SkAutoTime',
1099 'SkAutoTrace',
1100 'SkAutoUnref',
1102 anonymous = r'(%s)\s*[({]' % '|'.join(they_who_must_be_named)
1103 # bad: base::AutoLock(lock.get());
1104 # not bad: base::AutoLock lock(lock.get());
1105 bad_pattern = input_api.re.compile(anonymous)
1106 # good: new base::AutoLock(lock.get())
1107 good_pattern = input_api.re.compile(r'\bnew\s*' + anonymous)
1108 errors = []
1110 for f in input_api.AffectedFiles():
1111 if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
1112 continue
1113 for linenum, line in f.ChangedContents():
1114 if bad_pattern.search(line) and not good_pattern.search(line):
1115 errors.append('%s:%d' % (f.LocalPath(), linenum))
1117 if errors:
1118 return [output_api.PresubmitError(
1119 'These lines create anonymous variables that need to be named:',
1120 items=errors)]
1121 return []
1124 def _CheckCygwinShell(input_api, output_api):
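"""Checks that changed .gyp/.gypi files do not set msvs_cygwin_shell."""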
1125 source_file_filter = lambda x: input_api.FilterSourceFile(
1126 x, white_list=(r'.+\.(gyp|gypi)$',))
1127 cygwin_shell = []
1129 for f in input_api.AffectedSourceFiles(source_file_filter):
1130 for linenum, line in f.ChangedContents():
1131 if 'msvs_cygwin_shell' in line:
1132 cygwin_shell.append(f.LocalPath())
1133 break
1135 if cygwin_shell:
1136 return [output_api.PresubmitError(
1137 'These files should not use msvs_cygwin_shell (the default is 0):',
1138 items=cygwin_shell)]
1139 return []
1142 def _CheckUserActionUpdate(input_api, output_api):
1143 """Checks if any new user action has been added."""
1144 if any('actions.xml' == input_api.os_path.basename(f) for f in
1145 input_api.LocalPaths()):
1146 # If actions.xml is already included in the changelist, the PRESUBMIT
1147 # for actions.xml will do a more complete presubmit check.
1148 return []
1150 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm'))
1151 action_re = r'[^a-zA-Z]UserMetricsAction\("([^"]*)'
1152 current_actions = None
1153 for f in input_api.AffectedFiles(file_filter=file_filter):
1154 for line_num, line in f.ChangedContents():
1155 match = input_api.re.search(action_re, line)
1156 if match:
1157 # Loads contents in tools/metrics/actions/actions.xml to memory. It's
1158 # loaded only once.
1159 if not current_actions:
1160 with open('tools/metrics/actions/actions.xml') as actions_f:
1161 current_actions = actions_f.read()
1162 # Search for the matched user action name in |current_actions|.
1163 for action_name in match.groups():
1164 action = 'name="{0}"'.format(action_name)
1165 if action not in current_actions:
1166 return [output_api.PresubmitPromptWarning(
1167 'File %s line %d: %s is missing in '
1168 'tools/metrics/actions/actions.xml. Please run '
1169 'tools/metrics/actions/extract_actions.py to update.'
1170 % (f.LocalPath(), line_num, action_name))]
1171 return []
1174 def _GetJSONParseError(input_api, filename, eat_comments=True):
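"""Returns the ValueError from parsing |filename| as JSON (after optionally stripping comments), or None if it parses cleanly."""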
1175 try:
1176 contents = input_api.ReadFile(filename)
1177 if eat_comments:
1178 json_comment_eater = input_api.os_path.join(
1179 input_api.PresubmitLocalPath(),
1180 'tools', 'json_comment_eater', 'json_comment_eater.py')
1181 process = input_api.subprocess.Popen(
1182 [input_api.python_executable, json_comment_eater],
1183 stdin=input_api.subprocess.PIPE,
1184 stdout=input_api.subprocess.PIPE,
1185 universal_newlines=True)
1186 (contents, _) = process.communicate(input=contents)
1188 input_api.json.loads(contents)
1189 except ValueError as e:
1190 return e
1191 return None
1194 def _GetIDLParseError(input_api, filename):
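"""Returns the IDL schema compiler's error output for |filename|, or None if the file parses cleanly."""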
1195 try:
1196 contents = input_api.ReadFile(filename)
1197 idl_schema = input_api.os_path.join(
1198 input_api.PresubmitLocalPath(),
1199 'tools', 'json_schema_compiler', 'idl_schema.py')
1200 process = input_api.subprocess.Popen(
1201 [input_api.python_executable, idl_schema],
1202 stdin=input_api.subprocess.PIPE,
1203 stdout=input_api.subprocess.PIPE,
1204 stderr=input_api.subprocess.PIPE,
1205 universal_newlines=True)
1206 (_, error) = process.communicate(input=contents)
1207 return error or None
1208 except ValueError as e:
1209 return e
1212 def _CheckParseErrors(input_api, output_api):
1213 """Check that IDL and JSON files do not contain syntax errors."""
1214 actions = {
1215 '.idl': _GetIDLParseError,
1216 '.json': _GetJSONParseError,
1218 # These paths contain test data and other known invalid JSON files.
1219 excluded_patterns = [
1220 r'test[\\\/]data[\\\/]',
1221 r'^components[\\\/]policy[\\\/]resources[\\\/]policy_templates\.json$',
1223 # Most JSON files are preprocessed and support comments, but these do not.
1224 json_no_comments_patterns = [
1225 r'^testing[\\\/]',
1227 # Only run IDL checker on files in these directories.
1228 idl_included_patterns = [
1229 r'^chrome[\\\/]common[\\\/]extensions[\\\/]api[\\\/]',
1230 r'^extensions[\\\/]common[\\\/]api[\\\/]',
1233 def get_action(affected_file):
1234 filename = affected_file.LocalPath()
1235 return actions.get(input_api.os_path.splitext(filename)[1])
1237 def MatchesFile(patterns, path):
1238 for pattern in patterns:
1239 if input_api.re.search(pattern, path):
1240 return True
1241 return False
1243 def FilterFile(affected_file):
1244 action = get_action(affected_file)
1245 if not action:
1246 return False
1247 path = affected_file.LocalPath()
1249 if MatchesFile(excluded_patterns, path):
1250 return False
1252 if (action == _GetIDLParseError and
1253 not MatchesFile(idl_included_patterns, path)):
1254 return False
1255 return True
1257 results = []
1258 for affected_file in input_api.AffectedFiles(
1259 file_filter=FilterFile, include_deletes=False):
1260 action = get_action(affected_file)
1261 kwargs = {}
1262 if (action == _GetJSONParseError and
1263 MatchesFile(json_no_comments_patterns, affected_file.LocalPath())):
1264 kwargs['eat_comments'] = False
1265 parse_error = action(input_api,
1266 affected_file.AbsoluteLocalPath(),
1267 **kwargs)
1268 if parse_error:
1269 results.append(output_api.PresubmitError('%s could not be parsed: %s' %
1270 (affected_file.LocalPath(), parse_error)))
1271 return results
1274 def _CheckJavaStyle(input_api, output_api):
1275 """Runs checkstyle on changed java files and returns errors if any exist."""
1276 import sys
1277 original_sys_path = sys.path
1278 try:
1279 sys.path = sys.path + [input_api.os_path.join(
1280 input_api.PresubmitLocalPath(), 'tools', 'android', 'checkstyle')]
1281 import checkstyle
1282 finally:
1283 # Restore sys.path to what it was before.
1284 sys.path = original_sys_path
1286 return checkstyle.RunCheckstyle(
1287 input_api, output_api, 'tools/android/checkstyle/chromium-style-5.0.xml',
1288 black_list=_EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
1291 def _CheckForCopyrightedCode(input_api, output_api):
1292 """Verifies that newly added code doesn't contain copyrighted material
1293 and is properly licensed under the standard Chromium license.
1295 As there can be false positives, we maintain a whitelist file. This check
1296 also verifies that the whitelist file is up to date.
1298 import sys
1299 original_sys_path = sys.path
1300 try:
1301 sys.path = sys.path + [input_api.os_path.join(
1302 input_api.PresubmitLocalPath(), 'android_webview', 'tools')]
1303 import copyright_scanner
1304 finally:
1305 # Restore sys.path to what it was before.
1306 sys.path = original_sys_path
1308 return copyright_scanner.ScanAtPresubmit(input_api, output_api)
1311 def _CheckSingletonInHeaders(input_api, output_api):
1312 """Checks to make sure no header files have |Singleton<|."""
1313 def FileFilter(affected_file):
1314 # It's ok for base/memory/singleton.h to have |Singleton<|.
1315 black_list = (_EXCLUDED_PATHS +
1316 input_api.DEFAULT_BLACK_LIST +
1317 (r"^base[\\\/]memory[\\\/]singleton\.h$",))
1318 return input_api.FilterSourceFile(affected_file, black_list=black_list)
1320 pattern = input_api.re.compile(r'(?<!class\s)Singleton\s*<')
1321 files = []
1322 for f in input_api.AffectedSourceFiles(FileFilter):
1323 if (f.LocalPath().endswith('.h') or f.LocalPath().endswith('.hxx') or
1324 f.LocalPath().endswith('.hpp') or f.LocalPath().endswith('.inl')):
1325 contents = input_api.ReadFile(f)
1326 for line in contents.splitlines(False):
1327 if (not input_api.re.match(r'//', line) and # Skip comment lines.
1328 pattern.search(line)):
1329 files.append(f)
1330 break
1332 if files:
1333 return [ output_api.PresubmitError(
1334 'Found Singleton<T> in the following header files.\n' +
1335 'Please move them to an appropriate source file so that the ' +
1336 'template gets instantiated in a single compilation unit.',
1337 files) ]
1338 return []
1341 _DEPRECATED_CSS = [
1342 # Values
1343 ( "-webkit-box", "flex" ),
1344 ( "-webkit-inline-box", "inline-flex" ),
1345 ( "-webkit-flex", "flex" ),
1346 ( "-webkit-inline-flex", "inline-flex" ),
1347 ( "-webkit-min-content", "min-content" ),
1348 ( "-webkit-max-content", "max-content" ),
1350 # Properties
1351 ( "-webkit-background-clip", "background-clip" ),
1352 ( "-webkit-background-origin", "background-origin" ),
1353 ( "-webkit-background-size", "background-size" ),
1354 ( "-webkit-box-shadow", "box-shadow" ),
1356 # Functions
1357 ( "-webkit-gradient", "gradient" ),
1358 ( "-webkit-repeating-gradient", "repeating-gradient" ),
1359 ( "-webkit-linear-gradient", "linear-gradient" ),
1360 ( "-webkit-repeating-linear-gradient", "repeating-linear-gradient" ),
1361 ( "-webkit-radial-gradient", "radial-gradient" ),
1362 ( "-webkit-repeating-radial-gradient", "repeating-radial-gradient" ),
1365 def _CheckNoDeprecatedCSS(input_api, output_api):
1366 """ Make sure that we don't use deprecated CSS
1367 properties, functions or values. Our external
1368 documentation is ignored by the hooks as it
1369 needs to be consumed by WebKit. """
1370 results = []
1371 file_inclusion_pattern = (r".+\.css$",)
1372 black_list = (_EXCLUDED_PATHS +
1373 _TEST_CODE_EXCLUDED_PATHS +
1374 input_api.DEFAULT_BLACK_LIST +
1375 (r"^chrome/common/extensions/docs",
1376 r"^chrome/docs",
1377 r"^native_client_sdk"))
1378 file_filter = lambda f: input_api.FilterSourceFile(
1379 f, white_list=file_inclusion_pattern, black_list=black_list)
1380 for fpath in input_api.AffectedFiles(file_filter=file_filter):
1381 for line_num, line in fpath.ChangedContents():
1382 for (deprecated_value, value) in _DEPRECATED_CSS:
1383 if deprecated_value in line:
1384 results.append(output_api.PresubmitError(
1385 "%s:%d: Use of deprecated CSS %s, use %s instead" %
1386 (fpath.LocalPath(), line_num, deprecated_value, value)))
1387 return results
1390 _DEPRECATED_JS = [
1391 ( "__lookupGetter__", "Object.getOwnPropertyDescriptor" ),
1392 ( "__defineGetter__", "Object.defineProperty" ),
1393 ( "__defineSetter__", "Object.defineProperty" ),
1396 def _CheckNoDeprecatedJS(input_api, output_api):
1397 """Make sure that we don't use deprecated JS in Chrome code."""
1398 results = []
1399 file_inclusion_pattern = (r".+\.js$",) # TODO(dbeam): .html?
1400 black_list = (_EXCLUDED_PATHS + _TEST_CODE_EXCLUDED_PATHS +
1401 input_api.DEFAULT_BLACK_LIST)
1402 file_filter = lambda f: input_api.FilterSourceFile(
1403 f, white_list=file_inclusion_pattern, black_list=black_list)
1404 for fpath in input_api.AffectedFiles(file_filter=file_filter):
1405 for lnum, line in fpath.ChangedContents():
1406 for (deprecated, replacement) in _DEPRECATED_JS:
1407 if deprecated in line:
1408 results.append(output_api.PresubmitError(
1409 "%s:%d: Use of deprecated JS %s, use %s instead" %
1410 (fpath.LocalPath(), lnum, deprecated, replacement)))
1411 return results
1414 def _CommonChecks(input_api, output_api):
1415 """Checks common to both upload and commit."""
1416 results = []
1417 results.extend(input_api.canned_checks.PanProjectChecks(
1418 input_api, output_api,
1419 excluded_paths=_EXCLUDED_PATHS + _TESTRUNNER_PATHS))
1420 results.extend(_CheckAuthorizedAuthor(input_api, output_api))
1421 results.extend(
1422 _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
1423 results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
1424 results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
1425 results.extend(_CheckNoNewWStrings(input_api, output_api))
1426 results.extend(_CheckNoDEPSGIT(input_api, output_api))
1427 results.extend(_CheckNoBannedFunctions(input_api, output_api))
1428 results.extend(_CheckNoPragmaOnce(input_api, output_api))
1429 results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
1430 results.extend(_CheckUnwantedDependencies(input_api, output_api))
1431 results.extend(_CheckFilePermissions(input_api, output_api))
1432 results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
1433 results.extend(_CheckIncludeOrder(input_api, output_api))
1434 results.extend(_CheckForVersionControlConflicts(input_api, output_api))
1435 results.extend(_CheckPatchFiles(input_api, output_api))
1436 results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
1437 results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
1438 results.extend(_CheckForInvalidOSMacros(input_api, output_api))
1439 results.extend(_CheckForInvalidIfDefinedMacros(input_api, output_api))
1440 # TODO(danakj): Remove this when base/move.h is removed.
1441 results.extend(_CheckForUsingSideEffectsOfPass(input_api, output_api))
1442 results.extend(_CheckAddedDepsHaveTargetApprovals(input_api, output_api))
1443 results.extend(
1444 input_api.canned_checks.CheckChangeHasNoTabs(
1445 input_api,
1446 output_api,
1447 source_file_filter=lambda x: x.LocalPath().endswith('.grd')))
1448 results.extend(_CheckSpamLogging(input_api, output_api))
1449 results.extend(_CheckForAnonymousVariables(input_api, output_api))
1450 results.extend(_CheckCygwinShell(input_api, output_api))
1451 results.extend(_CheckUserActionUpdate(input_api, output_api))
1452 results.extend(_CheckNoDeprecatedCSS(input_api, output_api))
1453 results.extend(_CheckNoDeprecatedJS(input_api, output_api))
1454 results.extend(_CheckParseErrors(input_api, output_api))
1455 results.extend(_CheckForIPCRules(input_api, output_api))
1456 results.extend(_CheckForCopyrightedCode(input_api, output_api))
1457 results.extend(_CheckForWindowsLineEndings(input_api, output_api))
1458 results.extend(_CheckSingletonInHeaders(input_api, output_api))
1460 if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
1461 results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
1462 input_api, output_api,
1463 input_api.PresubmitLocalPath(),
1464 whitelist=[r'^PRESUBMIT_test\.py$']))
1465 return results
1468 def _CheckAuthorizedAuthor(input_api, output_api):
1469 """For non-googler/chromites committers, verify the author's email address is
1470 in AUTHORS.
1472 # TODO(maruel): Add it to input_api?
1473 import fnmatch
1475 author = input_api.change.author_email
1476 if not author:
1477 input_api.logging.info('No author, skipping AUTHOR check')
1478 return []
1479 authors_path = input_api.os_path.join(
1480 input_api.PresubmitLocalPath(), 'AUTHORS')
1481 valid_authors = (
1482 input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
1483 for line in open(authors_path))
1484 valid_authors = [item.group(1).lower() for item in valid_authors if item]
1485 if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
1486 input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
1487 return [output_api.PresubmitPromptWarning(
1488 ('%s is not in AUTHORS file. If you are a new contributor, please visit'
1489 '\n'
1490 'http://www.chromium.org/developers/contributing-code and read the '
1491 '"Legal" section\n'
1492 'If you are a chromite, verify the contributor signed the CLA.') %
1493 author)]
1494 return []
1497 def _CheckPatchFiles(input_api, output_api):
1498 problems = [f.LocalPath() for f in input_api.AffectedFiles()
1499 if f.LocalPath().endswith(('.orig', '.rej'))]
1500 if problems:
1501 return [output_api.PresubmitError(
1502 "Don't commit .rej and .orig files.", problems)]
1503 else:
1504 return []
1507 def _DidYouMeanOSMacro(bad_macro):
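"""Guesses which valid OS_* macro a misspelled macro was meant to be, based on its fourth character; returns '' if there is no guess."""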
1508 try:
1509 return {'A': 'OS_ANDROID',
1510 'B': 'OS_BSD',
1511 'C': 'OS_CHROMEOS',
1512 'F': 'OS_FREEBSD',
1513 'L': 'OS_LINUX',
1514 'M': 'OS_MACOSX',
1515 'N': 'OS_NACL',
1516 'O': 'OS_OPENBSD',
1517 'P': 'OS_POSIX',
1518 'S': 'OS_SOLARIS',
1519 'W': 'OS_WIN'}[bad_macro[3].upper()]
1520 except KeyError:
1521 return ''
1524 def _CheckForInvalidOSMacrosInFile(input_api, f):
1525 """Check for sensible looking, totally invalid OS macros."""
1526 preprocessor_statement = input_api.re.compile(r'^\s*#')
1527 os_macro = input_api.re.compile(r'defined\((OS_[^)]+)\)')
1528 results = []
1529 for lnum, line in f.ChangedContents():
1530 if preprocessor_statement.search(line):
1531 for match in os_macro.finditer(line):
1532 if not match.group(1) in _VALID_OS_MACROS:
1533 good = _DidYouMeanOSMacro(match.group(1))
1534 did_you_mean = ' (did you mean %s?)' % good if good else ''
1535 results.append(' %s:%d %s%s' % (f.LocalPath(),
1536 lnum,
1537 match.group(1),
1538 did_you_mean))
1539 return results
1542 def _CheckForInvalidOSMacros(input_api, output_api):
1543 """Check all affected files for invalid OS macros."""
1544 bad_macros = []
1545 for f in input_api.AffectedFiles():
1546 if not f.LocalPath().endswith(('.py', '.js', '.html', '.css')):
1547 bad_macros.extend(_CheckForInvalidOSMacrosInFile(input_api, f))
1549 if not bad_macros:
1550 return []
1552 return [output_api.PresubmitError(
1553 'Possibly invalid OS macro[s] found. Please fix your code\n'
1554 'or add your macro to src/PRESUBMIT.py.', bad_macros)]
1557 def _CheckForInvalidIfDefinedMacrosInFile(input_api, f):
1558 """Check all affected files for invalid "if defined" macros."""
1559 ALWAYS_DEFINED_MACROS = (
1560 "TARGET_CPU_PPC",
1561 "TARGET_CPU_PPC64",
1562 "TARGET_CPU_68K",
1563 "TARGET_CPU_X86",
1564 "TARGET_CPU_ARM",
1565 "TARGET_CPU_MIPS",
1566 "TARGET_CPU_SPARC",
1567 "TARGET_CPU_ALPHA",
1568 "TARGET_IPHONE_SIMULATOR",
1569 "TARGET_OS_EMBEDDED",
1570 "TARGET_OS_IPHONE",
1571 "TARGET_OS_MAC",
1572 "TARGET_OS_UNIX",
1573 "TARGET_OS_WIN32",
1575 ifdef_macro = input_api.re.compile(r'^\s*#.*(?:ifdef\s|defined\()([^\s\)]+)')
1576 results = []
1577 for lnum, line in f.ChangedContents():
1578 for match in ifdef_macro.finditer(line):
1579 if match.group(1) in ALWAYS_DEFINED_MACROS:
1580 always_defined = ' %s is always defined. ' % match.group(1)
1581 did_you_mean = 'Did you mean \'#if %s\'?' % match.group(1)
1582 results.append(' %s:%d %s\n\t%s' % (f.LocalPath(),
1583 lnum,
1584 always_defined,
1585 did_you_mean))
1586 return results
1589 def _CheckForInvalidIfDefinedMacros(input_api, output_api):
1590 """Check all affected files for invalid "if defined" macros."""
1591 bad_macros = []
1592 for f in input_api.AffectedFiles():
1593 if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
1594 bad_macros.extend(_CheckForInvalidIfDefinedMacrosInFile(input_api, f))
1596 if not bad_macros:
1597 return []
1599 return [output_api.PresubmitError(
1600 'Found ifdef check on always-defined macro[s]. Please fix your code\n'
1601 'or check the list of ALWAYS_DEFINED_MACROS in src/PRESUBMIT.py.',
1602 bad_macros)]
1605 def _CheckForUsingSideEffectsOfPass(input_api, output_api):
1606 """Check all affected files for using side effects of Pass."""
1607 errors = []
1608 for f in input_api.AffectedFiles():
1609 if f.LocalPath().endswith(('.h', '.c', '.cc', '.m', '.mm')):
1610 for lnum, line in f.ChangedContents():
1611 # Disallow Foo(*my_scoped_thing.Pass()); See crbug.com/418297.
1612 if input_api.re.search(r'\*[a-zA-Z0-9_]+\.Pass\(\)', line):
1613 errors.append(output_api.PresubmitError(
1614 ('%s:%d uses *foo.Pass() to delete the contents of scoped_ptr. ' +
1615 'See crbug.com/418297.') % (f.LocalPath(), lnum)))
1616 return errors
1619 def _CheckForIPCRules(input_api, output_api):
1620 """Check for same IPC rules described in
1621 http://www.chromium.org/Home/chromium-security/education/security-tips-for-ipc
1623 base_pattern = r'IPC_ENUM_TRAITS\('
1624 inclusion_pattern = input_api.re.compile(r'(%s)' % base_pattern)
1625 comment_pattern = input_api.re.compile(r'//.*(%s)' % base_pattern)
1627 problems = []
1628 for f in input_api.AffectedSourceFiles(None):
1629 local_path = f.LocalPath()
1630 if not local_path.endswith('.h'):
1631 continue
1632 for line_number, line in f.ChangedContents():
1633 if inclusion_pattern.search(line) and not comment_pattern.search(line):
1634 problems.append(
1635 '%s:%d\n %s' % (local_path, line_number, line.strip()))
1637 if problems:
1638 return [output_api.PresubmitPromptWarning(
1639 _IPC_ENUM_TRAITS_DEPRECATED, problems)]
1640 else:
1641 return []
1644 def _CheckForWindowsLineEndings(input_api, output_api):
1645 """Check source code and known ascii text files for Windows style line
1646 endings.
1648 known_text_files = r'.*\.(txt|html|htm|mhtml|py|gyp|gypi|gn|isolate)$'
1650 file_inclusion_pattern = (
1651 known_text_files,
1652 r'.+%s' % _IMPLEMENTATION_EXTENSIONS
1655 file_filter = lambda f: input_api.FilterSourceFile(
1656 f, white_list=file_inclusion_pattern, black_list=None)
1657 files = [f.LocalPath() for f in
1658 input_api.AffectedSourceFiles(file_filter)]
1660 problems = []
1662 for path in files:
1663 fp = open(path, 'r')
1664 for line in fp:
1665 if line.endswith('\r\n'):
1666 problems.append(path)
1667 break
1668 fp.close()
1670 if problems:
1671 return [output_api.PresubmitPromptWarning('Are you sure that you want '
1672 'these files to contain Windows style line endings?\n' +
1673 '\n'.join(problems))]
1675 return []
1678 def CheckChangeOnUpload(input_api, output_api):
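"""Presubmit hook run by depot_tools when a change is uploaded for review."""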
1679 results = []
1680 results.extend(_CommonChecks(input_api, output_api))
1681 results.extend(_CheckValidHostsInDEPS(input_api, output_api))
1682 results.extend(_CheckJavaStyle(input_api, output_api))
1683 results.extend(
1684 input_api.canned_checks.CheckGNFormatted(input_api, output_api))
1685 results.extend(_CheckUmaHistogramChanges(input_api, output_api))
1686 return results
1689 def GetTryServerMasterForBot(bot):
1690 """Returns the Try Server master for the given bot.
1692 It tries to guess the master from the bot name, but may still fail
1693 and return None. There is no longer a default master.
1695 # Potentially ambiguous bot names are listed explicitly.
1696 master_map = {
1697 'chromium_presubmit': 'tryserver.chromium.linux',
1698 'blink_presubmit': 'tryserver.chromium.linux',
1699 'tools_build_presubmit': 'tryserver.chromium.linux',
1701 master = master_map.get(bot)
1702 if not master:
1703 if 'linux' in bot or 'android' in bot or 'presubmit' in bot:
1704 master = 'tryserver.chromium.linux'
1705 elif 'win' in bot:
1706 master = 'tryserver.chromium.win'
1707 elif 'mac' in bot or 'ios' in bot:
1708 master = 'tryserver.chromium.mac'
1709 return master
1712 def GetDefaultTryConfigs(bots):
1713 """Returns a list of ('bot', set(['tests']), filtered by [bots].
1716 builders_and_tests = dict((bot, set(['defaulttests'])) for bot in bots)
1718 # Build up the mapping from tryserver master to bot/test.
1719 out = dict()
1720 for bot, tests in builders_and_tests.iteritems():
1721 out.setdefault(GetTryServerMasterForBot(bot), {})[bot] = tests
1722 return out
1725 def CheckChangeOnCommit(input_api, output_api):
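"""Presubmit hook run by depot_tools (and the commit queue) before a change is committed."""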
1726 results = []
1727 results.extend(_CommonChecks(input_api, output_api))
1728 # TODO(thestig) temporarily disabled, doesn't work in third_party/
1729 #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
1730 # input_api, output_api, sources))
1731 # Make sure the tree is 'open'.
1732 results.extend(input_api.canned_checks.CheckTreeIsOpen(
1733 input_api,
1734 output_api,
1735 json_url='http://chromium-status.appspot.com/current?format=json'))
1737 results.extend(input_api.canned_checks.CheckChangeHasBugField(
1738 input_api, output_api))
1739 results.extend(input_api.canned_checks.CheckChangeHasDescription(
1740 input_api, output_api))
1741 return results
1744 def GetPreferredTryMasters(project, change):
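"""Returns the try masters and builders the commit queue would launch, read from testing/commit_queue/config.json, minus triggered and presubmit builders."""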
1745 import re
1746 files = change.LocalPaths()
1748 import os
1749 import json
1750 with open(os.path.join(
1751 change.RepositoryRoot(), 'testing', 'commit_queue', 'config.json')) as f:
1752 cq_config = json.load(f)
1753 cq_verifiers = cq_config.get('verifiers_no_patch', {})
1754 cq_try_jobs = cq_verifiers.get('try_job_verifier', {})
1755 builders = cq_try_jobs.get('launched', {})
1757 for master, master_config in cq_try_jobs.get('triggered', {}).iteritems():
1758 for triggered_bot in master_config:
1759 builders.get(master, {}).pop(triggered_bot, None)
1761 # Explicitly iterate over copies of dicts since we mutate them.
1762 for master in builders.keys():
1763 for builder in builders[master].keys():
1764 # Do not trigger presubmit builders, since they're likely to fail
1765 # (e.g. OWNERS checks before finished code review), and we're
1766 # running local presubmit anyway.
1767 if 'presubmit' in builder:
1768 builders[master].pop(builder)
1770 return builders