cpplint.py
1 # Copyright (c) 2009 Google Inc. All rights reserved.
2 #
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are
5 # met:
6 #
7 # * Redistributions of source code must retain the above copyright
8 # notice, this list of conditions and the following disclaimer.
9 # * Redistributions in binary form must reproduce the above
10 # copyright notice, this list of conditions and the following disclaimer
11 # in the documentation and/or other materials provided with the
12 # distribution.
13 # * Neither the name of Google Inc. nor the names of its
14 # contributors may be used to endorse or promote products derived from
15 # this software without specific prior written permission.
16 #
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 
29 """Does google-lint on c++ files.
30 
31 The goal of this script is to identify places in the code that *may*
32 be in non-compliance with google style. It does not attempt to fix
33 up these problems -- the point is to educate. It also does not
34 attempt to find all problems, or to ensure that everything it does
35 find is legitimately a problem.
36 
37 In particular, we can get very confused by /* and // inside strings!
38 We do a small hack, which is to ignore //'s with "'s after them on the
39 same line, but it is far from perfect (in either direction).
40 """
41 
42 import codecs
43 import copy
44 import getopt
45 import math # for log
46 import os
47 import re
48 import sre_compile
49 import string
50 import sys
51 import unicodedata
52 import sysconfig
53 
54 try:
55  xrange # Python 2
56 except NameError:
57  xrange = range # Python 3
58  unicode = str
59 else:
60  sys.stderr = codecs.StreamReaderWriter(sys.stderr,
61  codecs.getreader('utf8'),
62  codecs.getwriter('utf8'),
63  'replace')
64 
65 _USAGE = """
66 Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...]
67  [--counting=total|toplevel|detailed] [--root=subdir]
68  [--linelength=digits] [--headers=x,y,...]
69  [--quiet]
70  <file> [file] ...
71 
72  The style guidelines this tries to follow are those in
73  https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml
74 
75  Every problem is given a confidence score from 1-5, with 5 meaning we are
76  certain of the problem, and 1 meaning it could be a legitimate construct.
77  This will miss some errors, and is not a substitute for a code review.
78 
79  To suppress false-positive errors of a certain category, add a
80  'NOLINT(category)' comment to the line. NOLINT or NOLINT(*)
81  suppresses errors of all categories on that line.
82 
83  The files passed in will be linted; at least one file must be provided.
84  Default linted extensions are .cc, .cpp, .cu, .cuh and .h. Change the
85  extensions with the --extensions flag.
86 
87  Flags:
88 
89  output=vs7
90  By default, the output is formatted to ease emacs parsing. Visual Studio
91  compatible output (vs7) may also be used. Other formats are unsupported.
92 
93  verbose=#
94  Specify a number 0-5 to restrict errors to certain verbosity levels.
95 
96  quiet
97  Don't print anything if no errors are found.
98 
99  filter=-x,+y,...
100  Specify a comma-separated list of category-filters to apply: only
101  error messages whose category names pass the filters will be printed.
102  (Category names are printed with the message and look like
103  "[whitespace/indent]".) Filters are evaluated left to right.
104  "-FOO" and "FOO" mean "do not print categories that start with FOO".
105  "+FOO" means "do print categories that start with FOO".
106 
107  Examples: --filter=-whitespace,+whitespace/braces
108  --filter=whitespace,runtime/printf,+runtime/printf_format
109  --filter=-,+build/include_what_you_use
110 
111  To see a list of all the categories used in cpplint, pass no arg:
112  --filter=
113 
114  counting=total|toplevel|detailed
115  The total number of errors found is always printed. If
116  'toplevel' is provided, then the count of errors in each of
117  the top-level categories like 'build' and 'whitespace' will
118  also be printed. If 'detailed' is provided, then a count
119  is provided for each category like 'build/class'.
120 
121  root=subdir
122  The root directory used for deriving header guard CPP variable.
123  By default, the header guard CPP variable is calculated as the relative
124  path to the directory that contains .git, .hg, or .svn. When this flag
125  is specified, the relative path is calculated from the specified
126  directory. If the specified directory does not exist, this flag is
127  ignored.
128 
129  Examples:
130  Assuming that top/src/.git exists (and cwd=top/src), the header guard
131  CPP variables for top/src/chrome/browser/ui/browser.h are:
132 
133  No flag => CHROME_BROWSER_UI_BROWSER_H_
134  --root=chrome => BROWSER_UI_BROWSER_H_
135  --root=chrome/browser => UI_BROWSER_H_
136  --root=.. => SRC_CHROME_BROWSER_UI_BROWSER_H_
137 
138  linelength=digits
139  This is the allowed line length for the project. The default value is
140  80 characters.
141 
142  Examples:
143  --linelength=120
144 
145  extensions=extension,extension,...
146  The allowed file extensions that cpplint will check
147 
148  Examples:
149  --extensions=hpp,cpp
150 
151  headers=x,y,...
152  The header extensions that cpplint will treat as .h in checks. Values are
153  automatically added to --extensions list.
154 
155  Examples:
156  --headers=hpp,hxx
157  --headers=hpp
158 
159  cpplint.py supports per-directory configurations specified in CPPLINT.cfg
160  files. A CPPLINT.cfg file can contain a number of key=value pairs.
161  Currently the following options are supported:
162 
163  set noparent
164  filter=+filter1,-filter2,...
165  exclude_files=regex
166  linelength=80
167  root=subdir
168  headers=x,y,...
169 
170  The "set noparent" option prevents cpplint from traversing the directory
171  tree upwards looking for more .cfg files in parent directories. This option
172  is usually placed in the top-level project directory.
173 
174  The "filter" option is similar in function to the --filter flag. It
175  specifies message filters in addition to the |_DEFAULT_FILTERS| and those
176  specified through the --filter command-line flag.
177 
178  "exclude_files" allows one to specify a regular expression to be matched
179  against a file name. If the expression matches, the file is skipped and not
180  run through the linter.
181 
182  "linelength" allows one to specify the allowed line length for the project.
183 
184  The "root" option is similar in function to the --root flag (see example
185  above). Paths are relative to the directory of the CPPLINT.cfg.
186 
187  The "headers" option is similar in function to the --headers flag
188  (see example above).
189 
190  CPPLINT.cfg has an effect on files in the same directory and all
191  sub-directories, unless overridden by a nested configuration file.
192 
193  Example file:
194  filter=-build/include_order,+build/include_alpha
195  exclude_files=.*\.cc
196 
197  The above example disables the build/include_order warning, enables
198  build/include_alpha, and excludes all .cc files from being
199  processed by the linter, in the current directory (where the .cfg
200  file is located) and all sub-directories.
201 """
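# An illustrative invocation using the flags documented above (the file names
# are placeholders):
#   python cpplint.py --filter=-whitespace/tab,+build/include_alpha \
#       --linelength=100 --counting=toplevel src/foo.cc src/foo.h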
202 
203 # We categorize each error message we print. Here are the categories.
204 # We want an explicit list so we can list them all in cpplint --filter=.
205 # If you add a new error message with a new category, add it to the list
206 # here! cpplint_unittest.py should tell you if you forget to do this.
207 _ERROR_CATEGORIES = [
208  'build/class',
209  'build/c++11',
210  'build/c++14',
211  'build/c++tr1',
212  'build/deprecated',
213  'build/endif_comment',
214  'build/explicit_make_pair',
215  'build/forward_decl',
216  'build/header_guard',
217  'build/include',
218  'build/include_alpha',
219  'build/include_order',
220  'build/include_what_you_use',
221  'build/namespaces',
222  'build/printf_format',
223  'build/storage_class',
224  'legal/copyright',
225  'readability/alt_tokens',
226  'readability/braces',
227  'readability/casting',
228  'readability/check',
229  'readability/constructors',
230  'readability/fn_size',
231  'readability/inheritance',
232  'readability/multiline_comment',
233  'readability/multiline_string',
234  'readability/namespace',
235  'readability/nolint',
236  'readability/nul',
237  'readability/strings',
238  'readability/todo',
239  'readability/utf8',
240  'runtime/arrays',
241  'runtime/casting',
242  'runtime/explicit',
243  'runtime/int',
244  'runtime/init',
245  'runtime/invalid_increment',
246  'runtime/member_string_references',
247  'runtime/memset',
248  'runtime/indentation_namespace',
249  'runtime/operator',
250  'runtime/printf',
251  'runtime/printf_format',
252  'runtime/references',
253  'runtime/string',
254  'runtime/threadsafe_fn',
255  'runtime/vlog',
256  'whitespace/blank_line',
257  'whitespace/braces',
258  'whitespace/comma',
259  'whitespace/comments',
260  'whitespace/empty_conditional_body',
261  'whitespace/empty_if_body',
262  'whitespace/empty_loop_body',
263  'whitespace/end_of_line',
264  'whitespace/ending_newline',
265  'whitespace/forcolon',
266  'whitespace/indent',
267  'whitespace/line_length',
268  'whitespace/newline',
269  'whitespace/operators',
270  'whitespace/parens',
271  'whitespace/semicolon',
272  'whitespace/tab',
273  'whitespace/todo',
274  ]
275 
276 # These error categories are no longer enforced by cpplint, but for backwards-
277 # compatibility they may still appear in NOLINT comments.
278 _LEGACY_ERROR_CATEGORIES = [
279  'readability/streams',
280  'readability/function',
281  ]
282 
283 # The default state of the category filter. This is overridden by the --filter=
284 # flag. By default all errors are on, so only add here categories that should be
285 # off by default (i.e., categories that must be enabled by the --filter= flags).
286 # All entries here should start with a '-' or '+', as in the --filter= flag.
287 _DEFAULT_FILTERS = ['-build/include_alpha']
288 
289 # The default list of categories suppressed for C (not C++) files.
290 _DEFAULT_C_SUPPRESSED_CATEGORIES = [
291  'readability/casting',
292  ]
293 
294 # The default list of categories suppressed for Linux Kernel files.
295 _DEFAULT_KERNEL_SUPPRESSED_CATEGORIES = [
296  'whitespace/tab',
297  ]
298 
299 # We used to check for high-bit characters, but after much discussion we
300 # decided those were OK, as long as they were in UTF-8 and didn't represent
301 # hard-coded international strings, which belong in a separate i18n file.
302 
303 # C++ headers
304 _CPP_HEADERS = frozenset([
305  # Legacy
306  'algobase.h',
307  'algo.h',
308  'alloc.h',
309  'builtinbuf.h',
310  'bvector.h',
311  'complex.h',
312  'defalloc.h',
313  'deque.h',
314  'editbuf.h',
315  'fstream.h',
316  'function.h',
317  'hash_map',
318  'hash_map.h',
319  'hash_set',
320  'hash_set.h',
321  'hashtable.h',
322  'heap.h',
323  'indstream.h',
324  'iomanip.h',
325  'iostream.h',
326  'istream.h',
327  'iterator.h',
328  'list.h',
329  'map.h',
330  'multimap.h',
331  'multiset.h',
332  'ostream.h',
333  'pair.h',
334  'parsestream.h',
335  'pfstream.h',
336  'procbuf.h',
337  'pthread_alloc',
338  'pthread_alloc.h',
339  'rope',
340  'rope.h',
341  'ropeimpl.h',
342  'set.h',
343  'slist',
344  'slist.h',
345  'stack.h',
346  'stdiostream.h',
347  'stl_alloc.h',
348  'stl_relops.h',
349  'streambuf.h',
350  'stream.h',
351  'strfile.h',
352  'strstream.h',
353  'tempbuf.h',
354  'tree.h',
355  'type_traits.h',
356  'vector.h',
357  # 17.6.1.2 C++ library headers
358  'algorithm',
359  'array',
360  'atomic',
361  'bitset',
362  'chrono',
363  'codecvt',
364  'complex',
365  'condition_variable',
366  'deque',
367  'exception',
368  'forward_list',
369  'fstream',
370  'functional',
371  'future',
372  'initializer_list',
373  'iomanip',
374  'ios',
375  'iosfwd',
376  'iostream',
377  'istream',
378  'iterator',
379  'limits',
380  'list',
381  'locale',
382  'map',
383  'memory',
384  'mutex',
385  'new',
386  'numeric',
387  'ostream',
388  'queue',
389  'random',
390  'ratio',
391  'regex',
392  'scoped_allocator',
393  'set',
394  'sstream',
395  'stack',
396  'stdexcept',
397  'streambuf',
398  'string',
399  'strstream',
400  'system_error',
401  'thread',
402  'tuple',
403  'typeindex',
404  'typeinfo',
405  'type_traits',
406  'unordered_map',
407  'unordered_set',
408  'utility',
409  'valarray',
410  'vector',
411  # 17.6.1.2 C++ headers for C library facilities
412  'cassert',
413  'ccomplex',
414  'cctype',
415  'cerrno',
416  'cfenv',
417  'cfloat',
418  'cinttypes',
419  'ciso646',
420  'climits',
421  'clocale',
422  'cmath',
423  'csetjmp',
424  'csignal',
425  'cstdalign',
426  'cstdarg',
427  'cstdbool',
428  'cstddef',
429  'cstdint',
430  'cstdio',
431  'cstdlib',
432  'cstring',
433  'ctgmath',
434  'ctime',
435  'cuchar',
436  'cwchar',
437  'cwctype',
438  ])
439 
440 # Type names
441 _TYPES = re.compile(
442  r'^(?:'
443  # [dcl.type.simple]
444  r'(char(16_t|32_t)?)|wchar_t|'
445  r'bool|short|int|long|signed|unsigned|float|double|'
446  # [support.types]
447  r'(ptrdiff_t|size_t|max_align_t|nullptr_t)|'
448  # [cstdint.syn]
449  r'(u?int(_fast|_least)?(8|16|32|64)_t)|'
450  r'(u?int(max|ptr)_t)|'
451  r')$')
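# For example, _TYPES matches 'unsigned', 'size_t' and 'uint64_t', but not a
# compound name such as 'std::string'.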
452 
453 
454 # These headers are excluded from [build/include] and [build/include_order]
455 # checks:
456 # - Anything not following google file name conventions (containing an
457 # uppercase character, such as Python.h or nsStringAPI.h, for example).
458 # - Lua headers.
459 _THIRD_PARTY_HEADERS_PATTERN = re.compile(
460  r'^(?:[^/]*[A-Z][^/]*\.h|lua\.h|lauxlib\.h|lualib\.h)$')
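# For example, 'Python.h' and 'lua.h' match this pattern and are skipped by
# those checks, while 'foo/bar.h' and 'vector' do not match.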
461 
462 # Pattern for matching FileInfo.BaseName() against test file name
463 _TEST_FILE_SUFFIX = r'(_test|_unittest|_regtest)$'
464 
465 # Pattern that matches only complete whitespace, possibly across multiple lines.
466 _EMPTY_CONDITIONAL_BODY_PATTERN = re.compile(r'^\s*$', re.DOTALL)
467 
468 # Assertion macros. These are defined in base/logging.h and
469 # testing/base/public/gunit.h.
470 _CHECK_MACROS = [
471  'DCHECK', 'CHECK',
472  'EXPECT_TRUE', 'ASSERT_TRUE',
473  'EXPECT_FALSE', 'ASSERT_FALSE',
474  ]
475 
476 # Replacement macros for CHECK/DCHECK/EXPECT_TRUE/EXPECT_FALSE
477 _CHECK_REPLACEMENT = dict([(m, {}) for m in _CHECK_MACROS])
478 
479 for op, replacement in [('==', 'EQ'), ('!=', 'NE'),
480  ('>=', 'GE'), ('>', 'GT'),
481  ('<=', 'LE'), ('<', 'LT')]:
482  _CHECK_REPLACEMENT['DCHECK'][op] = 'DCHECK_%s' % replacement
483  _CHECK_REPLACEMENT['CHECK'][op] = 'CHECK_%s' % replacement
484  _CHECK_REPLACEMENT['EXPECT_TRUE'][op] = 'EXPECT_%s' % replacement
485  _CHECK_REPLACEMENT['ASSERT_TRUE'][op] = 'ASSERT_%s' % replacement
486 
487 for op, inv_replacement in [('==', 'NE'), ('!=', 'EQ'),
488  ('>=', 'LT'), ('>', 'LE'),
489  ('<=', 'GT'), ('<', 'GE')]:
490  _CHECK_REPLACEMENT['EXPECT_FALSE'][op] = 'EXPECT_%s' % inv_replacement
491  _CHECK_REPLACEMENT['ASSERT_FALSE'][op] = 'ASSERT_%s' % inv_replacement
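# The two loops above produce entries such as:
#   _CHECK_REPLACEMENT['CHECK']['=='] == 'CHECK_EQ'
#   _CHECK_REPLACEMENT['EXPECT_FALSE']['=='] == 'EXPECT_NE'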
492 
493 # Alternative tokens and their replacements. For full list, see section 2.5
494 # Alternative tokens [lex.digraph] in the C++ standard.
495 #
496 # Digraphs (such as '%:') are not included here since it's a mess to
497 # match those on a word boundary.
498 _ALT_TOKEN_REPLACEMENT = {
499  'and': '&&',
500  'bitor': '|',
501  'or': '||',
502  'xor': '^',
503  'compl': '~',
504  'bitand': '&',
505  'and_eq': '&=',
506  'or_eq': '|=',
507  'xor_eq': '^=',
508  'not': '!',
509  'not_eq': '!='
510  }
511 
512 # Compile regular expression that matches all the above keywords. The "[ =()]"
513 # bit is meant to avoid matching these keywords outside of boolean expressions.
514 #
515 # False positives include C-style multi-line comments and multi-line strings
516 # but those have always been troublesome for cpplint.
517 _ALT_TOKEN_REPLACEMENT_PATTERN = re.compile(
518  r'[ =()](' + ('|'.join(_ALT_TOKEN_REPLACEMENT.keys())) + r')(?=[ (]|$)')
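# For example, the "and" in "if (x and y)" matches this pattern, while the
# trailing "and" in an identifier such as "operand" does not, because it is
# not preceded by one of " =()".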
519 
520 
521 # These constants define types of headers for use with
522 # _IncludeState.CheckNextIncludeOrder().
523 _C_SYS_HEADER = 1
524 _CPP_SYS_HEADER = 2
525 _LIKELY_MY_HEADER = 3
526 _POSSIBLE_MY_HEADER = 4
527 _OTHER_HEADER = 5
528 
529 # These constants define the current inline assembly state
530 _NO_ASM = 0 # Outside of inline assembly block
531 _INSIDE_ASM = 1 # Inside inline assembly block
532 _END_ASM = 2 # Last line of inline assembly block
533 _BLOCK_ASM = 3 # The whole block is an inline assembly block
534 
535 # Match start of assembly blocks
536 _MATCH_ASM = re.compile(r'^\s*(?:asm|_asm|__asm|__asm__)'
537  r'(?:\s+(volatile|__volatile__))?'
538  r'\s*[{(]')
539 
540 # Match strings that indicate we're working on a C (not C++) file.
541 _SEARCH_C_FILE = re.compile(r'\b(?:LINT_C_FILE|'
542  r'vim?:\s*.*(\s*|:)filetype=c(\s*|:|$))')
543 
544 # Match string that indicates we're working on a Linux Kernel file.
545 _SEARCH_KERNEL_FILE = re.compile(r'\b(?:LINT_KERNEL_FILE)')
546 
547 _regexp_compile_cache = {}
548 
549 # {str, set(int)}: a map from error categories to sets of linenumbers
550 # on which those errors are expected and should be suppressed.
551 _error_suppressions = {}
552 
553 # The root directory used for deriving header guard CPP variable.
554 # This is set by --root flag.
555 _root = None
556 _root_debug = False
557 
558 # The allowed line length of files.
559 # This is set by --linelength flag.
560 _line_length = 80
561 
562 # The allowed extensions for file names
563 # This is set by --extensions flag.
564 _valid_extensions = set(['cc', 'h', 'cpp', 'cu', 'cuh'])
565 
566 # Treat all headers starting with 'h' equally: .h, .hpp, .hxx etc.
567 # This is set by --headers flag.
568 _hpp_headers = set(['h'])
569 
570 # {str, bool}: a map from error categories to booleans which indicate if the
571 # category should be suppressed for every line.
572 _global_error_suppressions = {}
573 
575  global _hpp_headers
576  try:
577  _hpp_headers = set(val.split(','))
578  # Automatically append to the extensions list so it does not have to be set twice
579  _valid_extensions.update(_hpp_headers)
580  except ValueError:
581  PrintUsage('Header extensions must be comma separated list.')
582 
583 def IsHeaderExtension(file_extension):
584  return file_extension in _hpp_headers
585 
586 def ParseNolintSuppressions(filename, raw_line, linenum, error):
587  """Updates the global list of line error-suppressions.
588 
589  Parses any NOLINT comments on the current line, updating the global
590  error_suppressions store. Reports an error if the NOLINT comment
591  was malformed.
592 
593  Args:
594  filename: str, the name of the input file.
595  raw_line: str, the line of input text, with comments.
596  linenum: int, the number of the current line.
597  error: function, an error handler.
598  """
599  matched = Search(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?', raw_line)
600  if matched:
601  if matched.group(1):
602  suppressed_line = linenum + 1
603  else:
604  suppressed_line = linenum
605  category = matched.group(2)
606  if category in (None, '(*)'): # => "suppress all"
607  _error_suppressions.setdefault(None, set()).add(suppressed_line)
608  else:
609  if category.startswith('(') and category.endswith(')'):
610  category = category[1:-1]
611  if category in _ERROR_CATEGORIES:
612  _error_suppressions.setdefault(category, set()).add(suppressed_line)
613  elif category not in _LEGACY_ERROR_CATEGORIES:
614  error(filename, linenum, 'readability/nolint', 5,
615  'Unknown NOLINT error category: %s' % category)
616 
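# For example, a line ending in "// NOLINT(runtime/int)" suppresses runtime/int
# errors on that line only, while "// NOLINTNEXTLINE(whitespace/tab)"
# suppresses whitespace/tab errors on the following line.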
617 
618 def ProcessGlobalSuppresions(lines):
619  """Updates the list of global error suppressions.
620 
621  Parses any lint directives in the file that have global effect.
622 
623  Args:
624  lines: An array of strings, each representing a line of the file, with the
625  last element being empty if the file is terminated with a newline.
626  """
627  for line in lines:
628  if _SEARCH_C_FILE.search(line):
629  for category in _DEFAULT_C_SUPPRESSED_CATEGORIES:
630  _global_error_suppressions[category] = True
631  if _SEARCH_KERNEL_FILE.search(line):
632  for category in _DEFAULT_KERNEL_SUPPRESSED_CATEGORIES:
633  _global_error_suppressions[category] = True
634 
635 
636 def ResetNolintSuppressions():
637  """Resets the set of NOLINT suppressions to empty."""
638  _error_suppressions.clear()
639  _global_error_suppressions.clear()
640 
641 
642 def IsErrorSuppressedByNolint(category, linenum):
643  """Returns true if the specified error category is suppressed on this line.
644 
645  Consults the global error_suppressions map populated by
646  ParseNolintSuppressions/ProcessGlobalSuppresions/ResetNolintSuppressions.
647 
648  Args:
649  category: str, the category of the error.
650  linenum: int, the current line number.
651  Returns:
652  bool, True iff the error should be suppressed due to a NOLINT comment or
653  global suppression.
654  """
655  return (_global_error_suppressions.get(category, False) or
656  linenum in _error_suppressions.get(category, set()) or
657  linenum in _error_suppressions.get(None, set()))
658 
659 
660 def Match(pattern, s):
661  """Matches the string with the pattern, caching the compiled regexp."""
662  # The regexp compilation caching is inlined in both Match and Search for
663  # performance reasons; factoring it out into a separate function turns out
664  # to be noticeably expensive.
665  if pattern not in _regexp_compile_cache:
666  _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
667  return _regexp_compile_cache[pattern].match(s)
668 
669 
670 def ReplaceAll(pattern, rep, s):
671  """Replaces instances of pattern in a string with a replacement.
672 
673  The compiled regex is kept in a cache shared by Match and Search.
674 
675  Args:
676  pattern: regex pattern
677  rep: replacement text
678  s: search string
679 
680  Returns:
681  string with replacements made (or original string if no replacements)
682  """
683  if pattern not in _regexp_compile_cache:
684  _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
685  return _regexp_compile_cache[pattern].sub(rep, s)
686 
687 
688 def Search(pattern, s):
689  """Searches the string for the pattern, caching the compiled regexp."""
690  if pattern not in _regexp_compile_cache:
691  _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
692  return _regexp_compile_cache[pattern].search(s)
693 
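# For example, Match(r'\s*#\s*include', '  #include <map>') succeeds because
# Match is anchored at the start of the string, whereas Search would also find
# the pattern later in the line. Repeated calls with the same pattern reuse the
# compiled regexp stored in _regexp_compile_cache.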
694 
695 def _IsSourceExtension(s):
696  """File extension (excluding dot) matches a source file extension."""
697  return s in ('c', 'cc', 'cpp', 'cxx')
698 
699 
700 class _IncludeState(object):
701  """Tracks line numbers for includes, and the order in which includes appear.
702 
703  include_list contains list of lists of (header, line number) pairs.
704  It's a list of lists rather than just one flat list to make it
705  easier to update across preprocessor boundaries.
706 
707  Call CheckNextIncludeOrder() once for each header in the file, passing
708  in the type constants defined above. Calls in an illegal order will
709  raise an _IncludeError with an appropriate error message.
710 
711  """
712  # self._section will move monotonically through this set. If it ever
713  # needs to move backwards, CheckNextIncludeOrder will raise an error.
714  _INITIAL_SECTION = 0
715  _MY_H_SECTION = 1
716  _C_SECTION = 2
717  _CPP_SECTION = 3
718  _OTHER_H_SECTION = 4
719 
720  _TYPE_NAMES = {
721  _C_SYS_HEADER: 'C system header',
722  _CPP_SYS_HEADER: 'C++ system header',
723  _LIKELY_MY_HEADER: 'header this file implements',
724  _POSSIBLE_MY_HEADER: 'header this file may implement',
725  _OTHER_HEADER: 'other header',
726  }
727  _SECTION_NAMES = {
728  _INITIAL_SECTION: "... nothing. (This can't be an error.)",
729  _MY_H_SECTION: 'a header this file implements',
730  _C_SECTION: 'C system header',
731  _CPP_SECTION: 'C++ system header',
732  _OTHER_H_SECTION: 'other header',
733  }
734 
735  def __init__(self):
736  self.include_list = [[]]
737  self.ResetSection('')
738 
739  def FindHeader(self, header):
740  """Check if a header has already been included.
741 
742  Args:
743  header: header to check.
744  Returns:
745  Line number of previous occurrence, or -1 if the header has not
746  been seen before.
747  """
748  for section_list in self.include_list:
749  for f in section_list:
750  if f[0] == header:
751  return f[1]
752  return -1
753 
754  def ResetSection(self, directive):
755  """Reset section checking for preprocessor directive.
756 
757  Args:
758  directive: preprocessor directive (e.g. "if", "else").
759  """
760  # The name of the current section.
761  self._section = self._INITIAL_SECTION
762  # The path of last found header.
763  self._last_header = ''
764 
765  # Update list of includes. Note that we never pop from the
766  # include list.
767  if directive in ('if', 'ifdef', 'ifndef'):
768  self.include_list.append([])
769  elif directive in ('else', 'elif'):
770  self.include_list[-1] = []
771 
772  def SetLastHeader(self, header_path):
773  self._last_header = header_path
774 
775  def CanonicalizeAlphabeticalOrder(self, header_path):
776  """Returns a path canonicalized for alphabetical comparison.
777 
778  - replaces "-" with "_" so they both cmp the same.
779  - removes '-inl' since we don't require them to be after the main header.
780  - lowercase everything, just in case.
781 
782  Args:
783  header_path: Path to be canonicalized.
784 
785  Returns:
786  Canonicalized path.
787  """
788  return header_path.replace('-inl.h', '.h').replace('-', '_').lower()
789 
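 # For example, 'base/string-util-inl.h' canonicalizes to 'base/string_util.h',
 # so it sorts next to the main header it supplements.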
790  def IsInAlphabeticalOrder(self, clean_lines, linenum, header_path):
791  """Check if a header is in alphabetical order with the previous header.
792 
793  Args:
794  clean_lines: A CleansedLines instance containing the file.
795  linenum: The number of the line to check.
796  header_path: Canonicalized header to be checked.
797 
798  Returns:
799  Returns true if the header is in alphabetical order.
800  """
801  # If previous section is different from current section, _last_header will
802  # be reset to empty string, so it's always less than current header.
803  #
804  # If previous line was a blank line, assume that the headers are
805  # intentionally sorted the way they are.
806  if (self._last_header > header_path and
807  Match(r'^\s*#\s*include\b', clean_lines.elided[linenum - 1])):
808  return False
809  return True
810 
811  def CheckNextIncludeOrder(self, header_type):
812  """Returns a non-empty error message if the next header is out of order.
813 
814  This function also updates the internal state to be ready to check
815  the next include.
816 
817  Args:
818  header_type: One of the _XXX_HEADER constants defined above.
819 
820  Returns:
821  The empty string if the header is in the right order, or an
822  error message describing what's wrong.
823 
824  """
825  error_message = ('Found %s after %s' %
826  (self._TYPE_NAMES[header_type],
827  self._SECTION_NAMES[self._section]))
828 
829  last_section = self._section
830 
831  if header_type == _C_SYS_HEADER:
832  if self._section <= self._C_SECTION:
833  self._section = self._C_SECTION
834  else:
835  self._last_header = ''
836  return error_message
837  elif header_type == _CPP_SYS_HEADER:
838  if self._section <= self._CPP_SECTION:
839  self._section = self._CPP_SECTION
840  else:
841  self._last_header = ''
842  return error_message
843  elif header_type == _LIKELY_MY_HEADER:
844  if self._section <= self._MY_H_SECTION:
845  self._section = self._MY_H_SECTION
846  else:
847  self._section = self._OTHER_H_SECTION
848  elif header_type == _POSSIBLE_MY_HEADER:
849  if self._section <= self._MY_H_SECTION:
850  self._section = self._MY_H_SECTION
851  else:
852  # This will always be the fallback because we're not sure
853  # enough that the header is associated with this file.
854  self._section = self._OTHER_H_SECTION
855  else:
856  assert header_type == _OTHER_HEADER
857  self._section = self._OTHER_H_SECTION
858 
859  if last_section != self._section:
860  self._last_header = ''
861 
862  return ''
863 
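 # For example, in foo.cc the expected order is foo.h, then C system headers,
 # then C++ system headers, then other headers. Seeing a C system header after
 # a C++ system header would require moving _section backwards, so the error
 # message built above is returned instead.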
864 
865 class _CppLintState(object):
866  """Maintains module-wide state."""
867 
868  def __init__(self):
869  self.verbose_level = 1 # global setting.
870  self.error_count = 0 # global count of reported errors
871  # filters to apply when emitting error messages
872  self.filters = _DEFAULT_FILTERS[:]
873  # backup of filter list. Used to restore the state after each file.
874  self._filters_backup = self.filters[:]
875  self.counting = 'total' # In what way are we counting errors?
876  self.errors_by_category = {} # string to int dict storing error counts
877  self.quiet = False # Suppress non-error messages?
878 
879  # output format:
880  # "emacs" - format that emacs can parse (default)
881  # "vs7" - format that Microsoft Visual Studio 7 can parse
882  self.output_format = 'emacs'
883 
884  def SetOutputFormat(self, output_format):
885  """Sets the output format for errors."""
886  self.output_format = output_format
887 
888  def SetQuiet(self, quiet):
889  """Sets the module's quiet settings, and returns the previous setting."""
890  last_quiet = self.quiet
891  self.quiet = quiet
892  return last_quiet
893 
894  def SetVerboseLevel(self, level):
895  """Sets the module's verbosity, and returns the previous setting."""
896  last_verbose_level = self.verbose_level
897  self.verbose_level = level
898  return last_verbose_level
899 
900  def SetCountingStyle(self, counting_style):
901  """Sets the module's counting options."""
902  self.counting = counting_style
903 
904  def SetFilters(self, filters):
905  """Sets the error-message filters.
906 
907  These filters are applied when deciding whether to emit a given
908  error message.
909 
910  Args:
911  filters: A string of comma-separated filters (eg "+whitespace/indent").
912  Each filter should start with + or -; else we die.
913 
914  Raises:
915  ValueError: The comma-separated filters did not all start with '+' or '-'.
916  E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter"
917  """
918  # Default filters always have less priority than the flag ones.
919  self.filters = _DEFAULT_FILTERS[:]
920  self.AddFilters(filters)
921 
922  def AddFilters(self, filters):
923  """ Adds more filters to the existing list of error-message filters. """
924  for filt in filters.split(','):
925  clean_filt = filt.strip()
926  if clean_filt:
927  self.filters.append(clean_filt)
928  for filt in self.filters:
929  if not (filt.startswith('+') or filt.startswith('-')):
930  raise ValueError('Every filter in --filters must start with + or -'
931  ' (%s does not)' % filt)
932 
933  def BackupFilters(self):
934  """ Saves the current filter list to backup storage."""
935  self._filters_backup = self.filters[:]
936 
937  def RestoreFilters(self):
938  """ Restores filters previously backed up."""
939  self.filters = self._filters_backup[:]
940 
941  def ResetErrorCounts(self):
942  """Sets the module's error statistic back to zero."""
943  self.error_count = 0
944  self.errors_by_category = {}
945 
946  def IncrementErrorCount(self, category):
947  """Bumps the module's error statistic."""
948  self.error_count += 1
949  if self.counting in ('toplevel', 'detailed'):
950  if self.counting != 'detailed':
951  category = category.split('/')[0]
952  if category not in self.errors_by_category:
953  self.errors_by_category[category] = 0
954  self.errors_by_category[category] += 1
955 
956  def PrintErrorCounts(self):
957  """Print a summary of errors by category, and the total."""
958  for category, count in self.errors_by_category.items():
959  sys.stderr.write('Category \'%s\' errors found: %d\n' %
960  (category, count))
961  sys.stdout.write('Total errors found: %d\n' % self.error_count)
962 
963 _cpplint_state = _CppLintState()
964 
965 
966 def _OutputFormat():
967  """Gets the module's output format."""
968  return _cpplint_state.output_format
969 
970 
971 def _SetOutputFormat(output_format):
972  """Sets the module's output format."""
973  _cpplint_state.SetOutputFormat(output_format)
974 
975 def _Quiet():
976  """Returns the module's quiet setting."""
977  return _cpplint_state.quiet
978 
979 def _SetQuiet(quiet):
980  """Set the module's quiet status, and return previous setting."""
981  return _cpplint_state.SetQuiet(quiet)
982 
983 
984 def _VerboseLevel():
985  """Returns the module's verbosity setting."""
986  return _cpplint_state.verbose_level
987 
988 
989 def _SetVerboseLevel(level):
990  """Sets the module's verbosity, and returns the previous setting."""
991  return _cpplint_state.SetVerboseLevel(level)
992 
993 
994 def _SetCountingStyle(level):
995  """Sets the module's counting options."""
996  _cpplint_state.SetCountingStyle(level)
997 
998 
999 def _Filters():
1000  """Returns the module's list of output filters, as a list."""
1001  return _cpplint_state.filters
1002 
1003 
1004 def _SetFilters(filters):
1005  """Sets the module's error-message filters.
1006 
1007  These filters are applied when deciding whether to emit a given
1008  error message.
1009 
1010  Args:
1011  filters: A string of comma-separated filters (eg "whitespace/indent").
1012  Each filter should start with + or -; else we die.
1013  """
1014  _cpplint_state.SetFilters(filters)
1015 
1016 def _AddFilters(filters):
1017  """Adds more filter overrides.
1018 
1019  Unlike _SetFilters, this function does not reset the current list of filters
1020  available.
1021 
1022  Args:
1023  filters: A string of comma-separated filters (eg "whitespace/indent").
1024  Each filter should start with + or -; else we die.
1025  """
1026  _cpplint_state.AddFilters(filters)
1027 
1028 def _BackupFilters():
1029  """ Saves the current filter list to backup storage."""
1030  _cpplint_state.BackupFilters()
1031 
1032 def _RestoreFilters():
1033  """ Restores filters previously backed up."""
1034  _cpplint_state.RestoreFilters()
1035 
1036 class _FunctionState(object):
1037  """Tracks current function name and the number of lines in its body."""
1038 
1039  _NORMAL_TRIGGER = 250 # for --v=0, 500 for --v=1, etc.
1040  _TEST_TRIGGER = 400 # about 50% more than _NORMAL_TRIGGER.
1041 
1042  def __init__(self):
1043  self.in_a_function = False
1044  self.lines_in_function = 0
1045  self.current_function = ''
1046 
1047  def Begin(self, function_name):
1048  """Start analyzing function body.
1049 
1050  Args:
1051  function_name: The name of the function being tracked.
1052  """
1053  self.in_a_function = True
1054  self.lines_in_function = 0
1055  self.current_function = function_name
1056 
1057  def Count(self):
1058  """Count line in current function body."""
1059  if self.in_a_function:
1060  self.lines_in_function += 1
1061 
1062  def Check(self, error, filename, linenum):
1063  """Report if too many lines in function body.
1064 
1065  Args:
1066  error: The function to call with any errors found.
1067  filename: The name of the current file.
1068  linenum: The number of the line to check.
1069  """
1070  if not self.in_a_function:
1071  return
1072 
1073  if Match(r'T(EST|est)', self.current_function):
1074  base_trigger = self._TEST_TRIGGER
1075  else:
1076  base_trigger = self._NORMAL_TRIGGER
1077  trigger = base_trigger * 2**_VerboseLevel()
1078 
1079  if self.lines_in_function > trigger:
1080  error_level = int(math.log(self.lines_in_function / base_trigger, 2))
1081  # 50 => 0, 100 => 1, 200 => 2, 400 => 3, 800 => 4, 1600 => 5, ...
1082  if error_level > 5:
1083  error_level = 5
1084  error(filename, linenum, 'readability/fn_size', error_level,
1085  'Small and focused functions are preferred:'
1086  ' %s has %d non-comment lines'
1087  ' (error triggered by exceeding %d lines).' % (
1088  self.current_function, self.lines_in_function, trigger))
1089 
1090  def End(self):
1091  """Stop analyzing function body."""
1092  self.in_a_function = False
1093 
1094 
1095 class _IncludeError(Exception):
1096  """Indicates a problem with the include order in a file."""
1097  pass
1098 
1099 
1100 class FileInfo(object):
1101  """Provides utility functions for filenames.
1102 
1103  FileInfo provides easy access to the components of a file's path
1104  relative to the project root.
1105  """
1106 
1107  def __init__(self, filename):
1108  self._filename = filename
1109 
1110  def FullName(self):
1111  """Make Windows paths like Unix."""
1112  return os.path.abspath(self._filename).replace('\\', '/')
1113 
1114  def RepositoryName(self):
1115  """FullName after removing the local path to the repository.
1116 
1117  If we have a real absolute path name here we can try to do something smart:
1118  detecting the root of the checkout and truncating /path/to/checkout from
1119  the name so that we get header guards that don't include things like
1120  "C:\Documents and Settings\..." or "/home/username/..." in them and thus
1121  people on different computers who have checked the source out to different
1122  locations won't see bogus errors.
1123  """
1124  fullname = self.FullName()
1125 
1126  if os.path.exists(fullname):
1127  project_dir = os.path.dirname(fullname)
1128 
1129  if os.path.exists(os.path.join(project_dir, ".svn")):
1130  # If there's a .svn file in the current directory, we recursively look
1131  # up the directory tree for the top of the SVN checkout
1132  root_dir = project_dir
1133  one_up_dir = os.path.dirname(root_dir)
1134  while os.path.exists(os.path.join(one_up_dir, ".svn")):
1135  root_dir = os.path.dirname(root_dir)
1136  one_up_dir = os.path.dirname(one_up_dir)
1137 
1138  prefix = os.path.commonprefix([root_dir, project_dir])
1139  return fullname[len(prefix) + 1:]
1140 
1141  # Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by
1142  # searching up from the current path.
1143  root_dir = current_dir = os.path.dirname(fullname)
1144  while current_dir != os.path.dirname(current_dir):
1145  if (os.path.exists(os.path.join(current_dir, ".git")) or
1146  os.path.exists(os.path.join(current_dir, ".hg")) or
1147  os.path.exists(os.path.join(current_dir, ".svn"))):
1148  root_dir = current_dir
1149  current_dir = os.path.dirname(current_dir)
1150 
1151  if (os.path.exists(os.path.join(root_dir, ".git")) or
1152  os.path.exists(os.path.join(root_dir, ".hg")) or
1153  os.path.exists(os.path.join(root_dir, ".svn"))):
1154  prefix = os.path.commonprefix([root_dir, project_dir])
1155  return fullname[len(prefix) + 1:]
1156 
1157  # Don't know what to do; header guard warnings may be wrong...
1158  return fullname
1159 
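 # For example, for /home/user/checkout/src/foo/bar.h with a .git directory at
 # /home/user/checkout, RepositoryName() returns 'src/foo/bar.h'.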
1160  def Split(self):
1161  """Splits the file into the directory, basename, and extension.
1162 
1163  For 'chrome/browser/browser.cc', Split() would
1164  return ('chrome/browser', 'browser', '.cc')
1165 
1166  Returns:
1167  A tuple of (directory, basename, extension).
1168  """
1169 
1170  googlename = self.RepositoryName()
1171  project, rest = os.path.split(googlename)
1172  return (project,) + os.path.splitext(rest)
1173 
1174  def BaseName(self):
1175  """File base name - text after the final slash, before the final period."""
1176  return self.Split()[1]
1177 
1178  def Extension(self):
1179  """File extension - text following the final period."""
1180  return self.Split()[2]
1181 
1182  def NoExtension(self):
1183  """File has no source file extension."""
1184  return '/'.join(self.Split()[0:2])
1185 
1186  def IsSource(self):
1187  """File has a source file extension."""
1188  return _IsSourceExtension(self.Extension()[1:])
1189 
1190 
1191 def _ShouldPrintError(category, confidence, linenum):
1192  """If confidence >= verbose, category passes filter and is not suppressed."""
1193 
1194  # There are three ways we might decide not to print an error message:
1195  # a "NOLINT(category)" comment appears in the source,
1196  # the verbosity level isn't high enough, or the filters filter it out.
1197  if IsErrorSuppressedByNolint(category, linenum):
1198  return False
1199 
1200  if confidence < _cpplint_state.verbose_level:
1201  return False
1202 
1203  is_filtered = False
1204  for one_filter in _Filters():
1205  if one_filter.startswith('-'):
1206  if category.startswith(one_filter[1:]):
1207  is_filtered = True
1208  elif one_filter.startswith('+'):
1209  if category.startswith(one_filter[1:]):
1210  is_filtered = False
1211  else:
1212  assert False # should have been checked for in SetFilter.
1213  if is_filtered:
1214  return False
1215 
1216  return True
1217 
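# For example, with filters ['-whitespace', '+whitespace/braces'], a
# whitespace/braces error is printed but a whitespace/indent error is not,
# because filters are evaluated left to right and the last matching filter
# wins.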
1218 
1219 def Error(filename, linenum, category, confidence, message):
1220  """Logs the fact we've found a lint error.
1221 
1222  We log where the error was found, and also our confidence in the error,
1223  that is, how certain we are this is a legitimate style regression, and
1224  not a misidentification or a use that's sometimes justified.
1225 
1226  False positives can be suppressed by the use of
1227  "cpplint(category)" comments on the offending line. These are
1228  parsed into _error_suppressions.
1229 
1230  Args:
1231  filename: The name of the file containing the error.
1232  linenum: The number of the line containing the error.
1233  category: A string used to describe the "category" this bug
1234  falls under: "whitespace", say, or "runtime". Categories
1235  may have a hierarchy separated by slashes: "whitespace/indent".
1236  confidence: A number from 1-5 representing a confidence score for
1237  the error, with 5 meaning that we are certain of the problem,
1238  and 1 meaning that it could be a legitimate construct.
1239  message: The error message.
1240  """
1241  if _ShouldPrintError(category, confidence, linenum):
1242  _cpplint_state.IncrementErrorCount(category)
1243  if _cpplint_state.output_format == 'vs7':
1244  sys.stderr.write('%s(%s): error cpplint: [%s] %s [%d]\n' % (
1245  filename, linenum, category, message, confidence))
1246  elif _cpplint_state.output_format == 'eclipse':
1247  sys.stderr.write('%s:%s: warning: %s [%s] [%d]\n' % (
1248  filename, linenum, message, category, confidence))
1249  else:
1250  sys.stderr.write('%s:%s: %s [%s] [%d]\n' % (
1251  filename, linenum, message, category, confidence))
1252 
1253 
1254 # Matches standard C++ escape sequences per 2.13.2.3 of the C++ standard.
1255 _RE_PATTERN_CLEANSE_LINE_ESCAPES = re.compile(
1256  r'\\([abfnrtv?"\\\']|\d+|x[0-9a-fA-F]+)')
1257 # Match a single C style comment on the same line.
1258 _RE_PATTERN_C_COMMENTS = r'/\*(?:[^*]|\*(?!/))*\*/'
1259 # Matches multi-line C style comments.
1260 # This RE is a little bit more complicated than one might expect, because we
1261 # have to take care of space removal so we can handle comments inside
1262 # statements better.
1263 # The current rule is: we only clear spaces from both sides when we're at the
1264 # end of the line. Otherwise, we try to remove spaces from the right side;
1265 # if that doesn't work, we try the left side, but only if there's a non-word
1266 # character on the right.
1267 _RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile(
1268  r'(\s*' + _RE_PATTERN_C_COMMENTS + r'\s*$|' +
1269  _RE_PATTERN_C_COMMENTS + r'\s+|' +
1270  r'\s+' + _RE_PATTERN_C_COMMENTS + r'(?=\W)|' +
1271  _RE_PATTERN_C_COMMENTS + r')')
1272 
1273 
1274 def IsCppString(line):
1275  """Does line terminate so that the next symbol is in a string constant.
1276 
1277  This function does not consider single-line nor multi-line comments.
1278 
1279  Args:
1280  line: a partial line of code, starting from position 0 up to n.
1281 
1282  Returns:
1283  True, if next character appended to 'line' is inside a
1284  string constant.
1285  """
1286 
1287  line = line.replace(r'\\', 'XX') # after this, \\" does not match to \"
1288  return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1
1289 
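# For example, IsCppString('printf("hello') returns True because the double
# quote is still open, while IsCppString('printf("hello")') returns False.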
1290 
1291 def CleanseRawStrings(raw_lines):
1292  """Removes C++11 raw strings from lines.
1293 
1294  Before:
1295  static const char kData[] = R"(
1296  multi-line string
1297  )";
1298 
1299  After:
1300  static const char kData[] = ""
1301  (replaced by blank line)
1302  "";
1303 
1304  Args:
1305  raw_lines: list of raw lines.
1306 
1307  Returns:
1308  list of lines with C++11 raw strings replaced by empty strings.
1309  """
1310 
1311  delimiter = None
1312  lines_without_raw_strings = []
1313  for line in raw_lines:
1314  if delimiter:
1315  # Inside a raw string, look for the end
1316  end = line.find(delimiter)
1317  if end >= 0:
1318  # Found the end of the string, match leading space for this
1319  # line and resume copying the original lines, and also insert
1320  # a "" on the last line.
1321  leading_space = Match(r'^(\s*)\S', line)
1322  line = leading_space.group(1) + '""' + line[end + len(delimiter):]
1323  delimiter = None
1324  else:
1325  # Haven't found the end yet, append a blank line.
1326  line = '""'
1327 
1328  # Look for beginning of a raw string, and replace them with
1329  # empty strings. This is done in a loop to handle multiple raw
1330  # strings on the same line.
1331  while delimiter is None:
1332  # Look for beginning of a raw string.
1333  # See 2.14.15 [lex.string] for syntax.
1334  #
1335  # Once we have matched a raw string, we check the prefix of the
1336  # line to make sure that the line is not part of a single line
1337  # comment. It's done this way because we remove raw strings
1338  # before removing comments as opposed to removing comments
1339  # before removing raw strings. This is because there are some
1340  # cpplint checks that requires the comments to be preserved, but
1341  # we don't want to check comments that are inside raw strings.
1342  matched = Match(r'^(.*?)\b(?:R|u8R|uR|UR|LR)"([^\s\\()]*)\((.*)$', line)
1343  if (matched and
1344  not Match(r'^([^\'"]|\'(\\.|[^\'])*\'|"(\\.|[^"])*")*//',
1345  matched.group(1))):
1346  delimiter = ')' + matched.group(2) + '"'
1347 
1348  end = matched.group(3).find(delimiter)
1349  if end >= 0:
1350  # Raw string ended on same line
1351  line = (matched.group(1) + '""' +
1352  matched.group(3)[end + len(delimiter):])
1353  delimiter = None
1354  else:
1355  # Start of a multi-line raw string
1356  line = matched.group(1) + '""'
1357  else:
1358  break
1359 
1360  lines_without_raw_strings.append(line)
1361 
1362  # TODO(unknown): if delimiter is not None here, we might want to
1363  # emit a warning for unterminated string.
1364  return lines_without_raw_strings
1365 
1366 
1367 def FindNextMultiLineCommentStart(lines, lineix):
1368  """Find the beginning marker for a multiline comment."""
1369  while lineix < len(lines):
1370  if lines[lineix].strip().startswith('/*'):
1371  # Only return this marker if the comment goes beyond this line
1372  if lines[lineix].strip().find('*/', 2) < 0:
1373  return lineix
1374  lineix += 1
1375  return len(lines)
1376 
1377 
1378 def FindNextMultiLineCommentEnd(lines, lineix):
1379  """We are inside a comment, find the end marker."""
1380  while lineix < len(lines):
1381  if lines[lineix].strip().endswith('*/'):
1382  return lineix
1383  lineix += 1
1384  return len(lines)
1385 
1386 
1387 def RemoveMultiLineCommentsFromRange(lines, begin, end):
1388  """Clears a range of lines for multi-line comments."""
1389  # Having // dummy comments makes the lines non-empty, so we will not get
1390  # unnecessary blank line warnings later in the code.
1391  for i in range(begin, end):
1392  lines[i] = '/**/'
1393 
1394 
1395 def RemoveMultiLineComments(filename, lines, error):
1396  """Removes multiline (c-style) comments from lines."""
1397  lineix = 0
1398  while lineix < len(lines):
1399  lineix_begin = FindNextMultiLineCommentStart(lines, lineix)
1400  if lineix_begin >= len(lines):
1401  return
1402  lineix_end = FindNextMultiLineCommentEnd(lines, lineix_begin)
1403  if lineix_end >= len(lines):
1404  error(filename, lineix_begin + 1, 'readability/multiline_comment', 5,
1405  'Could not find end of multi-line comment')
1406  return
1407  RemoveMultiLineCommentsFromRange(lines, lineix_begin, lineix_end + 1)
1408  lineix = lineix_end + 1
1409 
1410 
1411 def CleanseComments(line):
1412  """Removes //-comments and single-line C-style /* */ comments.
1413 
1414  Args:
1415  line: A line of C++ source.
1416 
1417  Returns:
1418  The line with single-line comments removed.
1419  """
1420  commentpos = line.find('//')
1421  if commentpos != -1 and not IsCppString(line[:commentpos]):
1422  line = line[:commentpos].rstrip()
1423  # get rid of /* ... */
1424  return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line)
1425 
1426 
1427 class CleansedLines(object):
1428  """Holds 4 copies of all lines with different preprocessing applied to them.
1429 
1430  1) elided member contains lines without strings and comments.
1431  2) lines member contains lines without comments.
1432  3) raw_lines member contains all the lines without processing.
1433  4) lines_without_raw_strings member is same as raw_lines, but with C++11 raw
1434  strings removed.
1435  All these members are of <type 'list'>, and of the same length.
1436  """
1437 
1438  def __init__(self, lines):
1439  self.elided = []
1440  self.lines = []
1441  self.raw_lines = lines
1442  self.num_lines = len(lines)
1443  self.lines_without_raw_strings = CleanseRawStrings(lines)
1444  for linenum in range(len(self.lines_without_raw_strings)):
1445  self.lines.append(CleanseComments(
1446  self.lines_without_raw_strings[linenum]))
1447  elided = self._CollapseStrings(self.lines_without_raw_strings[linenum])
1448  self.elided.append(CleanseComments(elided))
1449 
1450  def NumLines(self):
1451  """Returns the number of lines represented."""
1452  return self.num_lines
1453 
1454  @staticmethod
1455  def _CollapseStrings(elided):
1456  """Collapses strings and chars on a line to simple "" or '' blocks.
1457 
1458  We nix strings first so we're not fooled by text like '"http://"'
1459 
1460  Args:
1461  elided: The line being processed.
1462 
1463  Returns:
1464  The line with collapsed strings.
1465  """
1466  if _RE_PATTERN_INCLUDE.match(elided):
1467  return elided
1468 
1469  # Remove escaped characters first to make quote/single quote collapsing
1470  # basic. Things that look like escaped characters shouldn't occur
1471  # outside of strings and chars.
1472  elided = _RE_PATTERN_CLEANSE_LINE_ESCAPES.sub('', elided)
1473 
1474  # Replace quoted strings and digit separators. Both single quotes
1475  # and double quotes are processed in the same loop, otherwise
1476  # nested quotes wouldn't work.
1477  collapsed = ''
1478  while True:
1479  # Find the first quote character
1480  match = Match(r'^([^\'"]*)([\'"])(.*)$', elided)
1481  if not match:
1482  collapsed += elided
1483  break
1484  head, quote, tail = match.groups()
1485 
1486  if quote == '"':
1487  # Collapse double quoted strings
1488  second_quote = tail.find('"')
1489  if second_quote >= 0:
1490  collapsed += head + '""'
1491  elided = tail[second_quote + 1:]
1492  else:
1493  # Unmatched double quote, don't bother processing the rest
1494  # of the line since this is probably a multiline string.
1495  collapsed += elided
1496  break
1497  else:
1498  # Found single quote, check nearby text to eliminate digit separators.
1499  #
1500  # There is no special handling for floating point here, because
1501  # the integer/fractional/exponent parts would all be parsed
1502  # correctly as long as there are digits on both sides of the
1503  # separator. So we are fine as long as we don't see something
1504  # like "0.'3" (gcc 4.9.0 will not allow this literal).
1505  if Search(r'\b(?:0[bBxX]?|[1-9])[0-9a-fA-F]*$', head):
1506  match_literal = Match(r'^((?:\'?[0-9a-zA-Z_])*)(.*)$', "'" + tail)
1507  collapsed += head + match_literal.group(1).replace("'", '')
1508  elided = match_literal.group(2)
1509  else:
1510  second_quote = tail.find('\'')
1511  if second_quote >= 0:
1512  collapsed += head + "''"
1513  elided = tail[second_quote + 1:]
1514  else:
1515  # Unmatched single quote
1516  collapsed += elided
1517  break
1518 
1519  return collapsed
1520 
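# For example, _CollapseStrings on the C++ line
#   f("hello", 'x', 1'000);
# produces
#   f("", '', 1000);
# string and character contents are dropped and the C++14 digit separator is
# removed.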
1521 
1522 def FindEndOfExpressionInLine(line, startpos, stack):
1523  """Find the position just after the end of current parenthesized expression.
1524 
1525  Args:
1526  line: a CleansedLines line.
1527  startpos: start searching at this position.
1528  stack: nesting stack at startpos.
1529 
1530  Returns:
1531  On finding matching end: (index just after matching end, None)
1532  On finding an unclosed expression: (-1, None)
1533  Otherwise: (-1, new stack at end of this line)
1534  """
1535  for i in xrange(startpos, len(line)):
1536  char = line[i]
1537  if char in '([{':
1538  # Found start of parenthesized expression, push to expression stack
1539  stack.append(char)
1540  elif char == '<':
1541  # Found potential start of template argument list
1542  if i > 0 and line[i - 1] == '<':
1543  # Left shift operator
1544  if stack and stack[-1] == '<':
1545  stack.pop()
1546  if not stack:
1547  return (-1, None)
1548  elif i > 0 and Search(r'\boperator\s*$', line[0:i]):
1549  # operator<, don't add to stack
1550  continue
1551  else:
1552  # Tentative start of template argument list
1553  stack.append('<')
1554  elif char in ')]}':
1555  # Found end of parenthesized expression.
1556  #
1557  # If we are currently expecting a matching '>', the pending '<'
1558  # must have been an operator. Remove them from expression stack.
1559  while stack and stack[-1] == '<':
1560  stack.pop()
1561  if not stack:
1562  return (-1, None)
1563  if ((stack[-1] == '(' and char == ')') or
1564  (stack[-1] == '[' and char == ']') or
1565  (stack[-1] == '{' and char == '}')):
1566  stack.pop()
1567  if not stack:
1568  return (i + 1, None)
1569  else:
1570  # Mismatched parentheses
1571  return (-1, None)
1572  elif char == '>':
1573  # Found potential end of template argument list.
1574 
1575  # Ignore "->" and operator functions
1576  if (i > 0 and
1577  (line[i - 1] == '-' or Search(r'\boperator\s*$', line[0:i - 1]))):
1578  continue
1579 
1580  # Pop the stack if there is a matching '<'. Otherwise, ignore
1581  # this '>' since it must be an operator.
1582  if stack:
1583  if stack[-1] == '<':
1584  stack.pop()
1585  if not stack:
1586  return (i + 1, None)
1587  elif char == ';':
1588  # Found something that looks like the end of a statement. If we are
1589  # currently expecting a '>', the matching '<' must have been an operator,
1590  # since template argument lists should not contain statements.
1591  while stack and stack[-1] == '<':
1592  stack.pop()
1593  if not stack:
1594  return (-1, None)
1595 
1596  # Did not find end of expression or unbalanced parentheses on this line
1597  return (-1, stack)
1598 
1599 
1600 def CloseExpression(clean_lines, linenum, pos):
1601  """If input points to ( or { or [ or <, finds the position that closes it.
1602 
1603  If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the
1604  linenum/pos that correspond to the closing of the expression.
1605 
1606  TODO(unknown): cpplint spends a fair bit of time matching parentheses.
1607  Ideally we would want to index all opening and closing parentheses once
1608  and have CloseExpression be just a simple lookup, but due to preprocessor
1609  tricks, this is not so easy.
1610 
1611  Args:
1612  clean_lines: A CleansedLines instance containing the file.
1613  linenum: The number of the line to check.
1614  pos: A position on the line.
1615 
1616  Returns:
1617  A tuple (line, linenum, pos) pointer *past* the closing brace, or
1618  (line, len(lines), -1) if we never find a close. Note we ignore
1619  strings and comments when matching; and the line we return is the
1620  'cleansed' line at linenum.
1621  """
1622 
1623  line = clean_lines.elided[linenum]
1624  if (line[pos] not in '({[<') or Match(r'<[<=]', line[pos:]):
1625  return (line, clean_lines.NumLines(), -1)
1626 
1627  # Check first line
1628  (end_pos, stack) = FindEndOfExpressionInLine(line, pos, [])
1629  if end_pos > -1:
1630  return (line, linenum, end_pos)
1631 
1632  # Continue scanning forward
1633  while stack and linenum < clean_lines.NumLines() - 1:
1634  linenum += 1
1635  line = clean_lines.elided[linenum]
1636  (end_pos, stack) = FindEndOfExpressionInLine(line, 0, stack)
1637  if end_pos > -1:
1638  return (line, linenum, end_pos)
1639 
1640  # Did not find end of expression before end of file, give up
1641  return (line, clean_lines.NumLines(), -1)
1642 
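# For example, with pos pointing at the '(' in 'Foo(bar, baz);',
# CloseExpression returns the position just past the matching ')' on the same
# line.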
1643 
1644 def FindStartOfExpressionInLine(line, endpos, stack):
1645  """Find position at the matching start of current expression.
1646 
1647  This is almost the reverse of FindEndOfExpressionInLine, but note
1648  that the input position and returned position differ by 1.
1649 
1650  Args:
1651  line: a CleansedLines line.
1652  endpos: start searching at this position.
1653  stack: nesting stack at endpos.
1654 
1655  Returns:
1656  On finding matching start: (index at matching start, None)
1657  On finding an unclosed expression: (-1, None)
1658  Otherwise: (-1, new stack at beginning of this line)
1659  """
1660  i = endpos
1661  while i >= 0:
1662  char = line[i]
1663  if char in ')]}':
1664  # Found end of expression, push to expression stack
1665  stack.append(char)
1666  elif char == '>':
1667  # Found potential end of template argument list.
1668  #
1669  # Ignore it if it's a "->" or ">=" or "operator>"
1670  if (i > 0 and
1671  (line[i - 1] == '-' or
1672  Match(r'\s>=\s', line[i - 1:]) or
1673  Search(r'\boperator\s*$', line[0:i]))):
1674  i -= 1
1675  else:
1676  stack.append('>')
1677  elif char == '<':
1678  # Found potential start of template argument list
1679  if i > 0 and line[i - 1] == '<':
1680  # Left shift operator
1681  i -= 1
1682  else:
1683  # If there is a matching '>', we can pop the expression stack.
1684  # Otherwise, ignore this '<' since it must be an operator.
1685  if stack and stack[-1] == '>':
1686  stack.pop()
1687  if not stack:
1688  return (i, None)
1689  elif char in '([{':
1690  # Found start of expression.
1691  #
1692  # If there are any unmatched '>' on the stack, they must be
1693  # operators. Remove those.
1694  while stack and stack[-1] == '>':
1695  stack.pop()
1696  if not stack:
1697  return (-1, None)
1698  if ((char == '(' and stack[-1] == ')') or
1699  (char == '[' and stack[-1] == ']') or
1700  (char == '{' and stack[-1] == '}')):
1701  stack.pop()
1702  if not stack:
1703  return (i, None)
1704  else:
1705  # Mismatched parentheses
1706  return (-1, None)
1707  elif char == ';':
1708  # Found something that looks like the end of a statement. If we are
1709  # currently expecting a '<', the matching '>' must have been an
1710  # operator, since a template argument list should not contain statements.
1711  while stack and stack[-1] == '>':
1712  stack.pop()
1713  if not stack:
1714  return (-1, None)
1715 
1716  i -= 1
1717 
1718  return (-1, stack)
1719 
1720 
1721 def ReverseCloseExpression(clean_lines, linenum, pos):
1722  """If input points to ) or } or ] or >, finds the position that opens it.
1723 
1724  If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the
1725  linenum/pos that correspond to the opening of the expression.
1726 
1727  Args:
1728  clean_lines: A CleansedLines instance containing the file.
1729  linenum: The number of the line to check.
1730  pos: A position on the line.
1731 
1732  Returns:
1733  A tuple (line, linenum, pos) pointer *at* the opening brace, or
1734  (line, 0, -1) if we never find the matching opening brace. Note
1735  we ignore strings and comments when matching; and the line we
1736  return is the 'cleansed' line at linenum.
1737  """
1738  line = clean_lines.elided[linenum]
1739  if line[pos] not in ')}]>':
1740  return (line, 0, -1)
1741 
1742  # Check last line
1743  (start_pos, stack) = FindStartOfExpressionInLine(line, pos, [])
1744  if start_pos > -1:
1745  return (line, linenum, start_pos)
1746 
1747  # Continue scanning backward
1748  while stack and linenum > 0:
1749  linenum -= 1
1750  line = clean_lines.elided[linenum]
1751  (start_pos, stack) = FindStartOfExpressionInLine(line, len(line) - 1, stack)
1752  if start_pos > -1:
1753  return (line, linenum, start_pos)
1754 
1755  # Did not find start of expression before beginning of file, give up
1756  return (line, 0, -1)
1757 
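# Illustrative sketch (hypothetical example): the mirror image of
# CloseExpression.  With the same elided line
#   void f(int a, int b);
# pointing at the ')' at position 19 walks back to the matching '(':
#   >>> ReverseCloseExpression(clean_lines, 0, 19)
#   ('void f(int a, int b);', 0, 6)
# Note the documented asymmetry: the forward search returns the position
# *past* the closing character, while the reverse search returns the
# position *at* the opening character.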
1758 
1759 def CheckForCopyright(filename, lines, error):
1760  """Logs an error if no Copyright message appears at the top of the file."""
1761 
1762  # We'll say it should occur by line 10. Don't forget there's a
1763  # dummy line at the front.
1764  for line in xrange(1, min(len(lines), 11)):
1765  if re.search(r'Copyright', lines[line], re.I): break
1766  else: # means no copyright line was found
1767  error(filename, 0, 'legal/copyright', 5,
1768  'No copyright message found. '
1769  'You should have a line: "Copyright [year] <Copyright Owner>"')
1770 
1771 
1772 def GetIndentLevel(line):
1773  """Return the number of leading spaces in line.
1774 
1775  Args:
1776  line: A string to check.
1777 
1778  Returns:
1779  An integer count of leading spaces, possibly zero.
1780  """
1781  indent = Match(r'^( *)\S', line)
1782  if indent:
1783  return len(indent.group(1))
1784  else:
1785  return 0
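# Illustrative sketch (hypothetical example): only leading spaces count, so
# tab-indented and empty lines both report an indent level of 0:
#   >>> GetIndentLevel('    int x;')
#   4
#   >>> GetIndentLevel('\tint x;')
#   0
#   >>> GetIndentLevel('')
#   0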
1786 
1787 def PathSplitToList(path):
1788  """Returns the path split into a list by the separator.
1789 
1790  Args:
1791  path: An absolute or relative path (e.g. '/a/b/c/' or '../a')
1792 
1793  Returns:
1794  A list of path components (e.g. ['a', 'b', 'c']).
1795  """
1796  lst = []
1797  while True:
1798  (head, tail) = os.path.split(path)
1799  if head == path: # absolute paths end
1800  lst.append(head)
1801  break
1802  if tail == path: # relative paths end
1803  lst.append(tail)
1804  break
1805 
1806  path = head
1807  lst.append(tail)
1808 
1809  lst.reverse()
1810  return lst
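# Illustrative sketch (hypothetical example, POSIX path separators): an
# absolute path keeps the leading separator as its first component:
#   >>> PathSplitToList('a/b/c')
#   ['a', 'b', 'c']
#   >>> PathSplitToList('/a/b')
#   ['/', 'a', 'b']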
1811 
1812 def GetHeaderGuardCPPVariable(filename):
1813  """Returns the CPP variable that should be used as a header guard.
1814 
1815  Args:
1816  filename: The name of a C++ header file.
1817 
1818  Returns:
1819  The CPP variable that should be used as a header guard in the
1820  named file.
1821 
1822  """
1823 
1824  # Restores original filename in case that cpplint is invoked from Emacs's
1825  # flymake.
1826  filename = re.sub(r'_flymake\.h$', '.h', filename)
1827  filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename)
1828  # Replace 'c++' with 'cpp'.
1829  filename = filename.replace('C++', 'cpp').replace('c++', 'cpp')
1830 
1831  fileinfo = FileInfo(filename)
1832  file_path_from_root = fileinfo.RepositoryName()
1833 
1834  def FixupPathFromRoot():
1835  if _root_debug:
1836  sys.stderr.write("\n_root fixup, _root = '%s', repository name = '%s'\n"
1837  %(_root, fileinfo.RepositoryName()))
1838 
1839  # Process the file path with the --root flag if it was set.
1840  if not _root:
1841  if _root_debug:
1842  sys.stderr.write("_root unspecified\n")
1843  return file_path_from_root
1844 
1845  def StripListPrefix(lst, prefix):
1846  # f(['x', 'y'], ['w', 'z']) -> None (not a valid prefix)
1847  if lst[:len(prefix)] != prefix:
1848  return None
1849  # f(['a', 'b', 'c', 'd'], ['a', 'b']) -> ['c', 'd']
1850  return lst[(len(prefix)):]
1851 
1852  # root behavior:
1853  # --root=subdir , lstrips subdir from the header guard
1854  maybe_path = StripListPrefix(PathSplitToList(file_path_from_root),
1855  PathSplitToList(_root))
1856 
1857  if _root_debug:
1858  sys.stderr.write(("_root lstrip (maybe_path=%s, file_path_from_root=%s," +
1859  " _root=%s)\n") %(maybe_path, file_path_from_root, _root))
1860 
1861  if maybe_path:
1862  return os.path.join(*maybe_path)
1863 
1864  # --root=.. , will prepend the outer directory to the header guard
1865  full_path = fileinfo.FullName()
1866  root_abspath = os.path.abspath(_root)
1867 
1868  maybe_path = StripListPrefix(PathSplitToList(full_path),
1869  PathSplitToList(root_abspath))
1870 
1871  if _root_debug:
1872  sys.stderr.write(("_root prepend (maybe_path=%s, full_path=%s, " +
1873  "root_abspath=%s)\n") %(maybe_path, full_path, root_abspath))
1874 
1875  if maybe_path:
1876  return os.path.join(*maybe_path)
1877 
1878  if _root_debug:
1879  sys.stderr.write("_root ignore, returning %s\n" %(file_path_from_root))
1880 
1881  # --root=FAKE_DIR is ignored
1882  return file_path_from_root
1883 
1884  file_path_from_root = FixupPathFromRoot()
1885  return re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_'
1886 
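# Illustrative sketch (hypothetical paths): assuming cpplint is invoked so
# that FileInfo.RepositoryName() reports 'chrome/browser/ui/browser.h' and
# --root is unset, GetHeaderGuardCPPVariable() returns
#   CHROME_BROWSER_UI_BROWSER_H_
# With --root=chrome the prefix is stripped first, giving
#   BROWSER_UI_BROWSER_H_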
1887 
1888 def CheckForHeaderGuard(filename, clean_lines, error):
1889  """Checks that the file contains a header guard.
1890 
1891  Logs an error if no #ifndef header guard is present. For other
1892  headers, checks that the full pathname is used.
1893 
1894  Args:
1895  filename: The name of the C++ header file.
1896  clean_lines: A CleansedLines instance containing the file.
1897  error: The function to call with any errors found.
1898  """
1899 
1900  # Don't check for header guards if there are error suppression
1901  # comments somewhere in this file.
1902  #
1903  # Because this is silencing a warning for a nonexistent line, we
1904  # only support the very specific NOLINT(build/header_guard) syntax,
1905  # and not the general NOLINT or NOLINT(*) syntax.
1906  raw_lines = clean_lines.lines_without_raw_strings
1907  for i in raw_lines:
1908  if Search(r'//\s*NOLINT\(build/header_guard\)', i):
1909  return
1910 
1911  cppvar = GetHeaderGuardCPPVariable(filename)
1912 
1913  ifndef = ''
1914  ifndef_linenum = 0
1915  define = ''
1916  endif = ''
1917  endif_linenum = 0
1918  for linenum, line in enumerate(raw_lines):
1919  linesplit = line.split()
1920  if len(linesplit) >= 2:
1921  # find the first occurrence of #ifndef and #define, save arg
1922  if not ifndef and linesplit[0] == '#ifndef':
1923  # set ifndef to the header guard presented on the #ifndef line.
1924  ifndef = linesplit[1]
1925  ifndef_linenum = linenum
1926  if not define and linesplit[0] == '#define':
1927  define = linesplit[1]
1928  # find the last occurrence of #endif, save entire line
1929  if line.startswith('#endif'):
1930  endif = line
1931  endif_linenum = linenum
1932 
1933  if not ifndef or not define or ifndef != define:
1934  error(filename, 0, 'build/header_guard', 5,
1935  'No #ifndef header guard found, suggested CPP variable is: %s' %
1936  cppvar)
1937  return
1938 
1939  # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__
1940  # for backward compatibility.
1941  if ifndef != cppvar:
1942  error_level = 0
1943  if ifndef != cppvar + '_':
1944  error_level = 5
1945 
1946  ParseNolintSuppressions(filename, raw_lines[ifndef_linenum], ifndef_linenum,
1947  error)
1948  error(filename, ifndef_linenum, 'build/header_guard', error_level,
1949  '#ifndef header guard has wrong style, please use: %s' % cppvar)
1950 
1951  # Check for "//" comments on endif line.
1952  ParseNolintSuppressions(filename, raw_lines[endif_linenum], endif_linenum,
1953  error)
1954  match = Match(r'#endif\s*//\s*' + cppvar + r'(_)?\b', endif)
1955  if match:
1956  if match.group(1) == '_':
1957  # Issue low severity warning for deprecated double trailing underscore
1958  error(filename, endif_linenum, 'build/header_guard', 0,
1959  '#endif line should be "#endif // %s"' % cppvar)
1960  return
1961 
1962  # Didn't find the corresponding "//" comment. If this file does not
1963  # contain any "//" comments at all, it could be that the compiler
1964  # only wants "/**/" comments, look for those instead.
1965  no_single_line_comments = True
1966  for i in xrange(1, len(raw_lines) - 1):
1967  line = raw_lines[i]
1968  if Match(r'^(?:(?:\'(?:\.|[^\'])*\')|(?:"(?:\.|[^"])*")|[^\'"])*//', line):
1969  no_single_line_comments = False
1970  break
1971 
1972  if no_single_line_comments:
1973  match = Match(r'#endif\s*/\*\s*' + cppvar + r'(_)?\s*\*/', endif)
1974  if match:
1975  if match.group(1) == '_':
1976  # Low severity warning for double trailing underscore
1977  error(filename, endif_linenum, 'build/header_guard', 0,
1978  '#endif line should be "#endif /* %s */"' % cppvar)
1979  return
1980 
1981  # Didn't find anything
1982  error(filename, endif_linenum, 'build/header_guard', 5,
1983  '#endif line should be "#endif // %s"' % cppvar)
1984 
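# Illustrative sketch (hypothetical guard FOO_BAR_H_): the layout accepted
# by CheckForHeaderGuard is
#   #ifndef FOO_BAR_H_
#   #define FOO_BAR_H_
#   ...
#   #endif  // FOO_BAR_H_
# An "#endif /* FOO_BAR_H_ */" comment is accepted only when the file
# contains no "//" comments at all, and a deprecated extra trailing
# underscore merely downgrades the complaint to verbosity 0.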
1985 
1986 def CheckHeaderFileIncluded(filename, include_state, error):
1987  """Logs an error if a .cc file does not include its header."""
1988 
1989  # Do not check test files
1990  fileinfo = FileInfo(filename)
1991  if Search(_TEST_FILE_SUFFIX, fileinfo.BaseName()):
1992  return
1993 
1994  headerfile = filename[0:len(filename) - len(fileinfo.Extension())] + '.h'
1995  if not os.path.exists(headerfile):
1996  return
1997  headername = FileInfo(headerfile).RepositoryName()
1998  first_include = 0
1999  for section_list in include_state.include_list:
2000  for f in section_list:
2001  if headername in f[0] or f[0] in headername:
2002  return
2003  if not first_include:
2004  first_include = f[1]
2005 
2006  error(filename, first_include, 'build/include', 5,
2007  '%s should include its header file %s' % (fileinfo.RepositoryName(),
2008  headername))
2009 
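# Illustrative sketch (hypothetical files): when linting foo/bar.cc and a
# sibling foo/bar.h exists on disk, one of the includes recorded in
# include_state must match foo/bar.h (substring comparison in either
# direction); otherwise build/include is reported at the first include of
# the file.  Files whose base name matches _TEST_FILE_SUFFIX are skipped.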
2010 
2011 def CheckForBadCharacters(filename, lines, error):
2012  """Logs an error for each line containing bad characters.
2013 
2014  Two kinds of bad characters:
2015 
2016  1. Unicode replacement characters: These indicate that either the file
2017  contained invalid UTF-8 (likely) or Unicode replacement characters (which
2018  it shouldn't). Note that it's possible for this to throw off line
2019  numbering if the invalid UTF-8 occurred adjacent to a newline.
2020 
2021  2. NUL bytes. These are problematic for some tools.
2022 
2023  Args:
2024  filename: The name of the current file.
2025  lines: An array of strings, each representing a line of the file.
2026  error: The function to call with any errors found.
2027  """
2028  for linenum, line in enumerate(lines):
2029  if u'\ufffd' in line:
2030  error(filename, linenum, 'readability/utf8', 5,
2031  'Line contains invalid UTF-8 (or Unicode replacement character).')
2032  if '\0' in line:
2033  error(filename, linenum, 'readability/nul', 5, 'Line contains NUL byte.')
2034 
2035 
2036 def CheckForNewlineAtEOF(filename, lines, error):
2037  """Logs an error if there is no newline char at the end of the file.
2038 
2039  Args:
2040  filename: The name of the current file.
2041  lines: An array of strings, each representing a line of the file.
2042  error: The function to call with any errors found.
2043  """
2044 
2045  # The array lines() was created by adding two newlines to the
2046  # original file (go figure), then splitting on \n.
2047  # To verify that the file ends in \n, we just have to make sure the
2048  # second-to-last element of lines() (i.e. lines[-2]) exists and is empty.
2049  if len(lines) < 3 or lines[-2]:
2050  error(filename, len(lines) - 2, 'whitespace/ending_newline', 5,
2051  'Could not find a newline character at the end of the file.')
2052 
2053 
2054 def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error):
2055  """Logs an error if we see /* ... */ or "..." that extend past one line.
2056 
2057  /* ... */ comments are legit inside macros, for one line.
2058  Otherwise, we prefer // comments, so it's ok to warn about the
2059  other. Likewise, it's ok for strings to extend across multiple
2060  lines, as long as a line continuation character (backslash)
2061  terminates each line. Although not currently prohibited by the C++
2062  style guide, it's ugly and unnecessary. We don't do well with either
2063  in this lint program, so we warn about both.
2064 
2065  Args:
2066  filename: The name of the current file.
2067  clean_lines: A CleansedLines instance containing the file.
2068  linenum: The number of the line to check.
2069  error: The function to call with any errors found.
2070  """
2071  line = clean_lines.elided[linenum]
2072 
2073  # Remove all \\ (escaped backslashes) from the line. They are OK, and the
2074  # second (escaped) slash may trigger later \" detection erroneously.
2075  line = line.replace('\\\\', '')
2076 
2077  if line.count('/*') > line.count('*/'):
2078  error(filename, linenum, 'readability/multiline_comment', 5,
2079  'Complex multi-line /*...*/-style comment found. '
2080  'Lint may give bogus warnings. '
2081  'Consider replacing these with //-style comments, '
2082  'with #if 0...#endif, '
2083  'or with more clearly structured multi-line comments.')
2084 
2085  if (line.count('"') - line.count('\\"')) % 2:
2086  error(filename, linenum, 'readability/multiline_string', 5,
2087  'Multi-line string ("...") found. This lint script doesn\'t '
2088  'do well with such strings, and may give bogus warnings. '
2089  'Use C++11 raw strings or concatenation instead.')
2090 
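# Illustrative sketch (hypothetical lines): the check above is purely
# line-local.  An elided line such as
#   /* this explanation continues on the next line
# has more '/*' than '*/' and triggers readability/multiline_comment, while
# a line left with an odd number of unescaped double quotes triggers
# readability/multiline_string.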
2091 
2092 # (non-threadsafe name, thread-safe alternative, validation pattern)
2093 #
2094 # The validation pattern is used to eliminate false positives such as:
2095 # _rand(); // false positive due to substring match.
2096 # ->rand(); // some member function rand().
2097 # ACMRandom rand(seed); // some variable named rand.
2098 # ISAACRandom rand(); // another variable named rand.
2099 #
2100 # Basically we require the return value of these functions to be used
2101 # in some expression context on the same line by matching on some
2102 # operator before the function name. This eliminates constructors and
2103 # member function calls.
2104 _UNSAFE_FUNC_PREFIX = r'(?:[-+*/=%^&|(<]\s*|>\s+)'
2105 _THREADING_LIST = (
2106  ('asctime(', 'asctime_r(', _UNSAFE_FUNC_PREFIX + r'asctime\([^)]+\)'),
2107  ('ctime(', 'ctime_r(', _UNSAFE_FUNC_PREFIX + r'ctime\([^)]+\)'),
2108  ('getgrgid(', 'getgrgid_r(', _UNSAFE_FUNC_PREFIX + r'getgrgid\([^)]+\)'),
2109  ('getgrnam(', 'getgrnam_r(', _UNSAFE_FUNC_PREFIX + r'getgrnam\([^)]+\)'),
2110  ('getlogin(', 'getlogin_r(', _UNSAFE_FUNC_PREFIX + r'getlogin\(\)'),
2111  ('getpwnam(', 'getpwnam_r(', _UNSAFE_FUNC_PREFIX + r'getpwnam\([^)]+\)'),
2112  ('getpwuid(', 'getpwuid_r(', _UNSAFE_FUNC_PREFIX + r'getpwuid\([^)]+\)'),
2113  ('gmtime(', 'gmtime_r(', _UNSAFE_FUNC_PREFIX + r'gmtime\([^)]+\)'),
2114  ('localtime(', 'localtime_r(', _UNSAFE_FUNC_PREFIX + r'localtime\([^)]+\)'),
2115  ('rand(', 'rand_r(', _UNSAFE_FUNC_PREFIX + r'rand\(\)'),
2116  ('strtok(', 'strtok_r(',
2117  _UNSAFE_FUNC_PREFIX + r'strtok\([^)]+\)'),
2118  ('ttyname(', 'ttyname_r(', _UNSAFE_FUNC_PREFIX + r'ttyname\([^)]+\)'),
2119  )
2120 
2121 
2122 def CheckPosixThreading(filename, clean_lines, linenum, error):
2123  """Checks for calls to thread-unsafe functions.
2124 
2125  Much code was originally written without consideration of
2126  multi-threading. Engineers also tend to rely on old experience; they
2127  learned POSIX before its threading extensions were added. These
2128  tests guide engineers to use thread-safe functions (when using
2129  POSIX directly).
2130 
2131  Args:
2132  filename: The name of the current file.
2133  clean_lines: A CleansedLines instance containing the file.
2134  linenum: The number of the line to check.
2135  error: The function to call with any errors found.
2136  """
2137  line = clean_lines.elided[linenum]
2138  for single_thread_func, multithread_safe_func, pattern in _THREADING_LIST:
2139  # Additional pattern matching check to confirm that this is the
2140  # function we are looking for
2141  if Search(pattern, line):
2142  error(filename, linenum, 'runtime/threadsafe_fn', 2,
2143  'Consider using ' + multithread_safe_func +
2144  '...) instead of ' + single_thread_func +
2145  '...) for improved thread safety.')
2146 
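# Illustrative sketch (hypothetical lines): because every pattern in
# _THREADING_LIST requires an operator or '(' immediately before the
# function name, only uses of the return value are flagged:
#   int r = rand();        // flagged, suggests rand_r(...)
#   ACMRandom rand(seed);  // not flagged: a variable named rand
#   obj->rand();           // not flagged: member function call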
2147 
2148 def CheckVlogArguments(filename, clean_lines, linenum, error):
2149  """Checks that VLOG() is only used for defining a logging level.
2150 
2151  For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and
2152  VLOG(FATAL) are not.
2153 
2154  Args:
2155  filename: The name of the current file.
2156  clean_lines: A CleansedLines instance containing the file.
2157  linenum: The number of the line to check.
2158  error: The function to call with any errors found.
2159  """
2160  line = clean_lines.elided[linenum]
2161  if Search(r'\bVLOG\((INFO|ERROR|WARNING|DFATAL|FATAL)\)', line):
2162  error(filename, linenum, 'runtime/vlog', 5,
2163  'VLOG() should be used with numeric verbosity level. '
2164  'Use LOG() if you want symbolic severity levels.')
2165 
2166 # Matches invalid increment: *count++, which moves pointer instead of
2167 # incrementing a value.
2168 _RE_PATTERN_INVALID_INCREMENT = re.compile(
2169  r'^\s*\*\w+(\+\+|--);')
2170 
2171 
2172 def CheckInvalidIncrement(filename, clean_lines, linenum, error):
2173  """Checks for invalid increment *count++.
2174 
2175  For example, the following function:
2176  void increment_counter(int* count) {
2177  *count++;
2178  }
2179  is invalid, because it effectively does count++, moving the pointer, and
2180  should be replaced with ++*count, (*count)++ or *count += 1.
2181 
2182  Args:
2183  filename: The name of the current file.
2184  clean_lines: A CleansedLines instance containing the file.
2185  linenum: The number of the line to check.
2186  error: The function to call with any errors found.
2187  """
2188  line = clean_lines.elided[linenum]
2189  if _RE_PATTERN_INVALID_INCREMENT.match(line):
2190  error(filename, linenum, 'runtime/invalid_increment', 5,
2191  'Changing pointer instead of value (or unused value of operator*).')
2192 
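# Illustrative sketch (hypothetical lines): only the bare
# dereference-then-increment form matches _RE_PATTERN_INVALID_INCREMENT:
#   *count++;      // flagged: increments the pointer, not the value
#   (*count)++;    // not flagged
#   ++*count;      // not flagged
#   *count += 1;   // not flagged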
2193 
2194 def IsMacroDefinition(clean_lines, linenum):
2195  if Search(r'^#define', clean_lines[linenum]):
2196  return True
2197 
2198  if linenum > 0 and Search(r'\\$', clean_lines[linenum - 1]):
2199  return True
2200 
2201  return False
2202 
2203 
2204 def IsForwardClassDeclaration(clean_lines, linenum):
2205  return Match(r'^\s*(\btemplate\b)*.*class\s+\w+;\s*$', clean_lines[linenum])
2206 
2207 
2208 class _BlockInfo(object):
2209  """Stores information about a generic block of code."""
2210 
2211  def __init__(self, linenum, seen_open_brace):
2212  self.starting_linenum = linenum
2213  self.seen_open_brace = seen_open_brace
2214  self.open_parentheses = 0
2215  self.inline_asm = _NO_ASM
2216  self.check_namespace_indentation = False
2217 
2218  def CheckBegin(self, filename, clean_lines, linenum, error):
2219  """Run checks that apply to text up to the opening brace.
2220 
2221  This is mostly for checking the text after the class identifier
2222  and the "{", usually where the base class is specified. For other
2223  blocks, there isn't much to check, so we always pass.
2224 
2225  Args:
2226  filename: The name of the current file.
2227  clean_lines: A CleansedLines instance containing the file.
2228  linenum: The number of the line to check.
2229  error: The function to call with any errors found.
2230  """
2231  pass
2232 
2233  def CheckEnd(self, filename, clean_lines, linenum, error):
2234  """Run checks that apply to text after the closing brace.
2235 
2236  This is mostly used for checking end of namespace comments.
2237 
2238  Args:
2239  filename: The name of the current file.
2240  clean_lines: A CleansedLines instance containing the file.
2241  linenum: The number of the line to check.
2242  error: The function to call with any errors found.
2243  """
2244  pass
2245 
2246  def IsBlockInfo(self):
2247  """Returns true if this block is a _BlockInfo.
2248 
2249  This is convenient for verifying that an object is an instance of
2250  a _BlockInfo, but not an instance of any of the derived classes.
2251 
2252  Returns:
2253  True for this class, False for derived classes.
2254  """
2255  return self.__class__ == _BlockInfo
2256 
2257 
2258 class _ExternCInfo(_BlockInfo):
2259  """Stores information about an 'extern "C"' block."""
2260 
2261  def __init__(self, linenum):
2262  _BlockInfo.__init__(self, linenum, True)
2263 
2264 
2265 class _ClassInfo(_BlockInfo):
2266  """Stores information about a class."""
2267 
2268  def __init__(self, name, class_or_struct, clean_lines, linenum):
2269  _BlockInfo.__init__(self, linenum, False)
2270  self.name = name
2271  self.is_derived = False
2272  self.check_namespace_indentation = True
2273  if class_or_struct == 'struct':
2274  self.access = 'public'
2275  self.is_struct = True
2276  else:
2277  self.access = 'private'
2278  self.is_struct = False
2279 
2280  # Remember initial indentation level for this class. Using raw_lines here
2281  # instead of elided to account for leading comments.
2282  self.class_indent = GetIndentLevel(clean_lines.raw_lines[linenum])
2283 
2284  # Try to find the end of the class. This will be confused by things like:
2285  # class A {
2286  # } *x = { ...
2287  #
2288  # But it's still good enough for CheckSectionSpacing.
2289  self.last_line = 0
2290  depth = 0
2291  for i in range(linenum, clean_lines.NumLines()):
2292  line = clean_lines.elided[i]
2293  depth += line.count('{') - line.count('}')
2294  if not depth:
2295  self.last_line = i
2296  break
2297 
2298  def CheckBegin(self, filename, clean_lines, linenum, error):
2299  # Look for a bare ':'
2300  if Search('(^|[^:]):($|[^:])', clean_lines.elided[linenum]):
2301  self.is_derived = True
2302 
2303  def CheckEnd(self, filename, clean_lines, linenum, error):
2304  # If there is a DISALLOW macro, it should appear near the end of
2305  # the class.
2306  seen_last_thing_in_class = False
2307  for i in xrange(linenum - 1, self.starting_linenum, -1):
2308  match = Search(
2309  r'\b(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)\(' +
2310  self.name + r'\)',
2311  clean_lines.elided[i])
2312  if match:
2313  if seen_last_thing_in_class:
2314  error(filename, i, 'readability/constructors', 3,
2315  match.group(1) + ' should be the last thing in the class')
2316  break
2317 
2318  if not Match(r'^\s*$', clean_lines.elided[i]):
2319  seen_last_thing_in_class = True
2320 
2321  # Check that closing brace is aligned with beginning of the class.
2322  # Only do this if the closing brace is indented by only whitespaces.
2323  # This means we will not check single-line class definitions.
2324  indent = Match(r'^( *)\}', clean_lines.elided[linenum])
2325  if indent and len(indent.group(1)) != self.class_indent:
2326  if self.is_struct:
2327  parent = 'struct ' + self.name
2328  else:
2329  parent = 'class ' + self.name
2330  error(filename, linenum, 'whitespace/indent', 3,
2331  'Closing brace should be aligned with beginning of %s' % parent)
2332 
2333 
2334 class _NamespaceInfo(_BlockInfo):
2335  """Stores information about a namespace."""
2336 
2337  def __init__(self, name, linenum):
2338  _BlockInfo.__init__(self, linenum, False)
2339  self.name = name or ''
2340  self.check_namespace_indentation = True
2341 
2342  def CheckEnd(self, filename, clean_lines, linenum, error):
2343  """Check end of namespace comments."""
2344  line = clean_lines.raw_lines[linenum]
2345 
2346  # Check how many lines are enclosed in this namespace. Don't issue
2347  # warning for missing namespace comments if there aren't enough
2348  # lines. However, do apply checks if there is already an end of
2349  # namespace comment and it's incorrect.
2350  #
2351  # TODO(unknown): We always want to check end of namespace comments
2352  # if a namespace is large, but sometimes we also want to apply the
2353  # check if a short namespace contained nontrivial things (something
2354  # other than forward declarations). There is currently no logic on
2355  # deciding what these nontrivial things are, so this check is
2356  # triggered by namespace size only, which works most of the time.
2357  if (linenum - self.starting_linenum < 10
2358  and not Match(r'^\s*};*\s*(//|/\*).*\bnamespace\b', line)):
2359  return
2360 
2361  # Look for matching comment at end of namespace.
2362  #
2363  # Note that we accept C style "/* */" comments for terminating
2364  # namespaces, so that code that terminates namespaces inside
2365  # preprocessor macros can be cpplint clean.
2366  #
2367  # We also accept stuff like "// end of namespace <name>." with the
2368  # period at the end.
2369  #
2370  # Besides these, we don't accept anything else, otherwise we might
2371  # get false negatives when the existing comment is a substring of the
2372  # expected namespace.
2373  if self.name:
2374  # Named namespace
2375  if not Match((r'^\s*};*\s*(//|/\*).*\bnamespace\s+' +
2376  re.escape(self.name) + r'[\*/\.\\\s]*$'),
2377  line):
2378  error(filename, linenum, 'readability/namespace', 5,
2379  'Namespace should be terminated with "// namespace %s"' %
2380  self.name)
2381  else:
2382  # Anonymous namespace
2383  if not Match(r'^\s*};*\s*(//|/\*).*\bnamespace[\*/\.\\\s]*$', line):
2384  # If "// namespace anonymous" or "// anonymous namespace (more text)",
2385  # mention "// anonymous namespace" as an acceptable form
2386  if Match(r'^\s*}.*\b(namespace anonymous|anonymous namespace)\b', line):
2387  error(filename, linenum, 'readability/namespace', 5,
2388  'Anonymous namespace should be terminated with "// namespace"'
2389  ' or "// anonymous namespace"')
2390  else:
2391  error(filename, linenum, 'readability/namespace', 5,
2392  'Anonymous namespace should be terminated with "// namespace"')
2393 
2394 
2395 class _PreprocessorInfo(object):
2396  """Stores checkpoints of nesting stacks when #if/#else is seen."""
2397 
2398  def __init__(self, stack_before_if):
2399  # The entire nesting stack before #if
2400  self.stack_before_if = stack_before_if
2401 
2402  # The entire nesting stack up to #else
2403  self.stack_before_else = []
2404 
2405  # Whether we have already seen #else or #elif
2406  self.seen_else = False
2407 
2408 
2409 class NestingState(object):
2410  """Holds states related to parsing braces."""
2411 
2412  def __init__(self):
2413  # Stack for tracking all braces. An object is pushed whenever we
2414  # see a "{", and popped when we see a "}". Only 3 types of
2415  # objects are possible:
2416  # - _ClassInfo: a class or struct.
2417  # - _NamespaceInfo: a namespace.
2418  # - _BlockInfo: some other type of block.
2419  self.stack = []
2420 
2421  # Top of the previous stack before each Update().
2422  #
2423  # Because the nesting_stack is updated at the end of each line, we
2424  # had to do some convoluted checks to find out what is the current
2425  # scope at the beginning of the line. This check is simplified by
2426  # saving the previous top of nesting stack.
2427  #
2428  # We could save the full stack, but we only need the top. Copying
2429  # the full nesting stack would slow down cpplint by ~10%.
2430  self.previous_stack_top = None
2431 
2432  # Stack of _PreprocessorInfo objects.
2433  self.pp_stack = []
2434 
2435  def SeenOpenBrace(self):
2436  """Check if we have seen the opening brace for the innermost block.
2437 
2438  Returns:
2439  True if we have seen the opening brace, False if the innermost
2440  block is still expecting an opening brace.
2441  """
2442  return (not self.stack) or self.stack[-1].seen_open_brace
2443 
2444  def InNamespaceBody(self):
2445  """Check if we are currently one level inside a namespace body.
2446 
2447  Returns:
2448  True if top of the stack is a namespace block, False otherwise.
2449  """
2450  return self.stack and isinstance(self.stack[-1], _NamespaceInfo)
2451 
2452  def InExternC(self):
2453  """Check if we are currently one level inside an 'extern "C"' block.
2454 
2455  Returns:
2456  True if top of the stack is an extern block, False otherwise.
2457  """
2458  return self.stack and isinstance(self.stack[-1], _ExternCInfo)
2459 
2460  def InClassDeclaration(self):
2461  """Check if we are currently one level inside a class or struct declaration.
2462 
2463  Returns:
2464  True if top of the stack is a class/struct, False otherwise.
2465  """
2466  return self.stack and isinstance(self.stack[-1], _ClassInfo)
2467 
2468  def InAsmBlock(self):
2469  """Check if we are currently one level inside an inline ASM block.
2470 
2471  Returns:
2472  True if the top of the stack is a block containing inline ASM.
2473  """
2474  return self.stack and self.stack[-1].inline_asm != _NO_ASM
2475 
2476  def InTemplateArgumentList(self, clean_lines, linenum, pos):
2477  """Check if current position is inside template argument list.
2478 
2479  Args:
2480  clean_lines: A CleansedLines instance containing the file.
2481  linenum: The number of the line to check.
2482  pos: position just after the suspected template argument.
2483  Returns:
2484  True if (linenum, pos) is inside template arguments.
2485  """
2486  while linenum < clean_lines.NumLines():
2487  # Find the earliest character that might indicate a template argument
2488  line = clean_lines.elided[linenum]
2489  match = Match(r'^[^{};=\[\]\.<>]*(.)', line[pos:])
2490  if not match:
2491  linenum += 1
2492  pos = 0
2493  continue
2494  token = match.group(1)
2495  pos += len(match.group(0))
2496 
2497  # These things do not look like a template argument list:
2498  # class Suspect {
2499  # class Suspect x; }
2500  if token in ('{', '}', ';'): return False
2501 
2502  # These things look like a template argument list:
2503  # template <class Suspect>
2504  # template <class Suspect = default_value>
2505  # template <class Suspect[]>
2506  # template <class Suspect...>
2507  if token in ('>', '=', '[', ']', '.'): return True
2508 
2509  # Check if token is an unmatched '<'.
2510  # If not, move on to the next character.
2511  if token != '<':
2512  pos += 1
2513  if pos >= len(line):
2514  linenum += 1
2515  pos = 0
2516  continue
2517 
2518  # We can't be sure if we just found a single '<'; we need to
2519  # find the matching '>'.
2520  (_, end_line, end_pos) = CloseExpression(clean_lines, linenum, pos - 1)
2521  if end_pos < 0:
2522  # Not sure if template argument list or syntax error in file
2523  return False
2524  linenum = end_line
2525  pos = end_pos
2526  return False
2527 
2528  def UpdatePreprocessor(self, line):
2529  """Update preprocessor stack.
2530 
2531  We need to handle preprocessors due to classes like this:
2532  #ifdef SWIG
2533  struct ResultDetailsPageElementExtensionPoint {
2534  #else
2535  struct ResultDetailsPageElementExtensionPoint : public Extension {
2536  #endif
2537 
2538  We make the following assumptions (good enough for most files):
2539  - Preprocessor condition evaluates to true from #if up to first
2540  #else/#elif/#endif.
2541 
2542  - Preprocessor condition evaluates to false from #else/#elif up
2543  to #endif. We still perform lint checks on these lines, but
2544  these do not affect nesting stack.
2545 
2546  Args:
2547  line: current line to check.
2548  """
2549  if Match(r'^\s*#\s*(if|ifdef|ifndef)\b', line):
2550  # Beginning of #if block, save the nesting stack here. The saved
2551  # stack will allow us to restore the parsing state in the #else case.
2552  self.pp_stack.append(_PreprocessorInfo(copy.deepcopy(self.stack)))
2553  elif Match(r'^\s*#\s*(else|elif)\b', line):
2554  # Beginning of #else block
2555  if self.pp_stack:
2556  if not self.pp_stack[-1].seen_else:
2557  # This is the first #else or #elif block. Remember the
2558  # whole nesting stack up to this point. This is what we
2559  # keep after the #endif.
2560  self.pp_stack[-1].seen_else = True
2561  self.pp_stack[-1].stack_before_else = copy.deepcopy(self.stack)
2562 
2563  # Restore the stack to how it was before the #if
2564  self.stack = copy.deepcopy(self.pp_stack[-1].stack_before_if)
2565  else:
2566  # TODO(unknown): unexpected #else, issue warning?
2567  pass
2568  elif Match(r'^\s*#\s*endif\b', line):
2569  # End of #if or #else blocks.
2570  if self.pp_stack:
2571  # If we saw an #else, we will need to restore the nesting
2572  # stack to its former state before the #else, otherwise we
2573  # will just continue from where we left off.
2574  if self.pp_stack[-1].seen_else:
2575  # Here we can just use a shallow copy since we are the last
2576  # reference to it.
2577  self.stack = self.pp_stack[-1].stack_before_else
2578  # Drop the corresponding #if
2579  self.pp_stack.pop()
2580  else:
2581  # TODO(unknown): unexpected #endif, issue warning?
2582  pass
2583 
2584  # TODO(unknown): Update() is too long, but we will refactor later.
2585  def Update(self, filename, clean_lines, linenum, error):
2586  """Update nesting state with current line.
2587 
2588  Args:
2589  filename: The name of the current file.
2590  clean_lines: A CleansedLines instance containing the file.
2591  linenum: The number of the line to check.
2592  error: The function to call with any errors found.
2593  """
2594  line = clean_lines.elided[linenum]
2595 
2596  # Remember top of the previous nesting stack.
2597  #
2598  # The stack is always pushed/popped and not modified in place, so
2599  # we can just do a shallow copy instead of copy.deepcopy. Using
2600  # deepcopy would slow down cpplint by ~28%.
2601  if self.stack:
2602  self.previous_stack_top = self.stack[-1]
2603  else:
2604  self.previous_stack_top = None
2605 
2606  # Update pp_stack
2607  self.UpdatePreprocessor(line)
2608 
2609  # Count parentheses. This is to avoid adding struct arguments to
2610  # the nesting stack.
2611  if self.stack:
2612  inner_block = self.stack[-1]
2613  depth_change = line.count('(') - line.count(')')
2614  inner_block.open_parentheses += depth_change
2615 
2616  # Also check if we are starting or ending an inline assembly block.
2617  if inner_block.inline_asm in (_NO_ASM, _END_ASM):
2618  if (depth_change != 0 and
2619  inner_block.open_parentheses == 1 and
2620  _MATCH_ASM.match(line)):
2621  # Enter assembly block
2622  inner_block.inline_asm = _INSIDE_ASM
2623  else:
2624  # Not entering assembly block. If previous line was _END_ASM,
2625  # we will now shift to _NO_ASM state.
2626  inner_block.inline_asm = _NO_ASM
2627  elif (inner_block.inline_asm == _INSIDE_ASM and
2628  inner_block.open_parentheses == 0):
2629  # Exit assembly block
2630  inner_block.inline_asm = _END_ASM
2631 
2632  # Consume namespace declaration at the beginning of the line. Do
2633  # this in a loop so that we catch same line declarations like this:
2634  # namespace proto2 { namespace bridge { class MessageSet; } }
2635  while True:
2636  # Match start of namespace. The "\b\s*" below catches namespace
2637  # declarations even if they aren't followed by whitespace; this
2638  # is so that we don't confuse our namespace checker. The
2639  # missing spaces will be flagged by CheckSpacing.
2640  namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$', line)
2641  if not namespace_decl_match:
2642  break
2643 
2644  new_namespace = _NamespaceInfo(namespace_decl_match.group(1), linenum)
2645  self.stack.append(new_namespace)
2646 
2647  line = namespace_decl_match.group(2)
2648  if line.find('{') != -1:
2649  new_namespace.seen_open_brace = True
2650  line = line[line.find('{') + 1:]
2651 
2652  # Look for a class declaration in whatever is left of the line
2653  # after parsing namespaces. The regexp accounts for decorated classes
2654  # such as in:
2655  # class LOCKABLE API Object {
2656  # };
2657  class_decl_match = Match(
2658  r'^(\s*(?:template\s*<[\w\s<>,:]*>\s*)?'
2659  r'(class|struct)\s+(?:[A-Z_]+\s+)*(\w+(?:::\w+)*))'
2660  r'(.*)$', line)
2661  if (class_decl_match and
2662  (not self.stack or self.stack[-1].open_parentheses == 0)):
2663  # We do not want to accept classes that are actually template arguments:
2664  # template <class Ignore1,
2665  # class Ignore2 = Default<Args>,
2666  # template <Args> class Ignore3>
2667  # void Function() {};
2668  #
2669  # To avoid template argument cases, we scan forward and look for
2670  # an unmatched '>'. If we see one, assume we are inside a
2671  # template argument list.
2672  end_declaration = len(class_decl_match.group(1))
2673  if not self.InTemplateArgumentList(clean_lines, linenum, end_declaration):
2674  self.stack.append(_ClassInfo(
2675  class_decl_match.group(3), class_decl_match.group(2),
2676  clean_lines, linenum))
2677  line = class_decl_match.group(4)
2678 
2679  # If we have not yet seen the opening brace for the innermost block,
2680  # run checks here.
2681  if not self.SeenOpenBrace():
2682  self.stack[-1].CheckBegin(filename, clean_lines, linenum, error)
2683 
2684  # Update access control if we are inside a class/struct
2685  if self.stack and isinstance(self.stack[-1], _ClassInfo):
2686  classinfo = self.stack[-1]
2687  access_match = Match(
2688  r'^(.*)\b(public|private|protected|signals)(\s+(?:slots\s*)?)?'
2689  r':(?:[^:]|$)',
2690  line)
2691  if access_match:
2692  classinfo.access = access_match.group(2)
2693 
2694  # Check that access keywords are indented +1 space. Skip this
2695  # check if the keywords are not preceded by whitespaces.
2696  indent = access_match.group(1)
2697  if (len(indent) != classinfo.class_indent + 1 and
2698  Match(r'^\s*$', indent)):
2699  if classinfo.is_struct:
2700  parent = 'struct ' + classinfo.name
2701  else:
2702  parent = 'class ' + classinfo.name
2703  slots = ''
2704  if access_match.group(3):
2705  slots = access_match.group(3)
2706  error(filename, linenum, 'whitespace/indent', 3,
2707  '%s%s: should be indented +1 space inside %s' % (
2708  access_match.group(2), slots, parent))
2709 
2710  # Consume braces or semicolons from what's left of the line
2711  while True:
2712  # Match first brace, semicolon, or closed parenthesis.
2713  matched = Match(r'^[^{;)}]*([{;)}])(.*)$', line)
2714  if not matched:
2715  break
2716 
2717  token = matched.group(1)
2718  if token == '{':
2719  # If the namespace or class hasn't seen an opening brace yet, mark
2720  # namespace/class head as complete. Push a new block onto the
2721  # stack otherwise.
2722  if not self.SeenOpenBrace():
2723  self.stack[-1].seen_open_brace = True
2724  elif Match(r'^extern\s*"[^"]*"\s*\{', line):
2725  self.stack.append(_ExternCInfo(linenum))
2726  else:
2727  self.stack.append(_BlockInfo(linenum, True))
2728  if _MATCH_ASM.match(line):
2729  self.stack[-1].inline_asm = _BLOCK_ASM
2730 
2731  elif token == ';' or token == ')':
2732  # If we haven't seen an opening brace yet, but we already saw
2733  # a semicolon, this is probably a forward declaration. Pop
2734  # the stack for these.
2735  #
2736  # Similarly, if we haven't seen an opening brace yet, but we
2737  # already saw a closing parenthesis, then these are probably
2738  # function arguments with extra "class" or "struct" keywords.
2739  # Also pop the stack for these.
2740  if not self.SeenOpenBrace():
2741  self.stack.pop()
2742  else: # token == '}'
2743  # Perform end of block checks and pop the stack.
2744  if self.stack:
2745  self.stack[-1].CheckEnd(filename, clean_lines, linenum, error)
2746  self.stack.pop()
2747  line = matched.group(2)
2748 
2749  def InnermostClass(self):
2750  """Get class info on the top of the stack.
2751 
2752  Returns:
2753  A _ClassInfo object if we are inside a class, or None otherwise.
2754  """
2755  for i in range(len(self.stack), 0, -1):
2756  classinfo = self.stack[i - 1]
2757  if isinstance(classinfo, _ClassInfo):
2758  return classinfo
2759  return None
2760 
2761  def CheckCompletedBlocks(self, filename, error):
2762  """Checks that all classes and namespaces have been completely parsed.
2763 
2764  Call this when all lines in a file have been processed.
2765  Args:
2766  filename: The name of the current file.
2767  error: The function to call with any errors found.
2768  """
2769  # Note: This test can result in false positives if #ifdef constructs
2770  # get in the way of brace matching. See the testBuildClass test in
2771  # cpplint_unittest.py for an example of this.
2772  for obj in self.stack:
2773  if isinstance(obj, _ClassInfo):
2774  error(filename, obj.starting_linenum, 'build/class', 5,
2775  'Failed to find complete declaration of class %s' %
2776  obj.name)
2777  elif isinstance(obj, _NamespaceInfo):
2778  error(filename, obj.starting_linenum, 'build/namespaces', 5,
2779  'Failed to find complete declaration of namespace %s' %
2780  obj.name)
2781 
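# Illustrative sketch (hypothetical driver, assuming clean_lines, filename
# and error are already set up): NestingState is consumed one line at a
# time, in order, and checked once at end of file:
#   nesting_state = NestingState()
#   for linenum in range(clean_lines.NumLines()):
#     nesting_state.Update(filename, clean_lines, linenum, error)
#     # ... per-line checks that need nesting_state go here ...
#   nesting_state.CheckCompletedBlocks(filename, error)
# Update() must be called for every line, in order, or the brace and
# namespace bookkeeping above falls out of sync with the file.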
2782 
2783 def CheckForNonStandardConstructs(filename, clean_lines, linenum,
2784  nesting_state, error):
2785  r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.
2786 
2787  Complain about several constructs which gcc-2 accepts, but which are
2788  not standard C++. Warning about these in lint is one way to ease the
2789  transition to new compilers.
2790  - put storage class first (e.g. "static const" instead of "const static").
2791  - "%lld" instead of "%qd" in printf-type functions.
2792  - "%1$d" is non-standard in printf-type functions.
2793  - "\%" is an undefined character escape sequence.
2794  - text after #endif is not allowed.
2795  - invalid inner-style forward declaration.
2796  - >? and <? operators, and their >?= and <?= cousins.
2797 
2798  Additionally, check for constructor/destructor style violations and reference
2799  members, as it is very convenient to do so while checking for
2800  gcc-2 compliance.
2801 
2802  Args:
2803  filename: The name of the current file.
2804  clean_lines: A CleansedLines instance containing the file.
2805  linenum: The number of the line to check.
2806  nesting_state: A NestingState instance which maintains information about
2807  the current stack of nested blocks being parsed.
2808  error: A callable to which errors are reported, which takes 4 arguments:
2809  filename, line number, error level, and message
2810  """
2811 
2812  # Remove comments from the line, but leave in strings for now.
2813  line = clean_lines.lines[linenum]
2814 
2815  if Search(r'printf\s*\(.*".*%[-+ ]?\d*q', line):
2816  error(filename, linenum, 'runtime/printf_format', 3,
2817  '%q in format strings is deprecated. Use %ll instead.')
2818 
2819  if Search(r'printf\s*\(.*".*%\d+\$', line):
2820  error(filename, linenum, 'runtime/printf_format', 2,
2821  '%N$ formats are unconventional. Try rewriting to avoid them.')
2822 
2823  # Remove escaped backslashes before looking for undefined escapes.
2824  line = line.replace('\\\\', '')
2825 
2826  if Search(r'("|\').*\\(%|\[|\(|{)', line):
2827  error(filename, linenum, 'build/printf_format', 3,
2828  '%, [, (, and { are undefined character escapes. Unescape them.')
2829 
2830  # For the rest, work with both comments and strings removed.
2831  line = clean_lines.elided[linenum]
2832 
2833  if Search(r'\b(const|volatile|void|char|short|int|long'
2834  r'|float|double|signed|unsigned'
2835  r'|schar|u?int8|u?int16|u?int32|u?int64)'
2836  r'\s+(register|static|extern|typedef)\b',
2837  line):
2838  error(filename, linenum, 'build/storage_class', 5,
2839  'Storage-class specifier (static, extern, typedef, etc) should be '
2840  'at the beginning of the declaration.')
2841 
2842  if Match(r'\s*#\s*endif\s*[^/\s]+', line):
2843  error(filename, linenum, 'build/endif_comment', 5,
2844  'Uncommented text after #endif is non-standard. Use a comment.')
2845 
2846  if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line):
2847  error(filename, linenum, 'build/forward_decl', 5,
2848  'Inner-style forward declarations are invalid. Remove this line.')
2849 
2850  if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?',
2851  line):
2852  error(filename, linenum, 'build/deprecated', 3,
2853  '>? and <? (max and min) operators are non-standard and deprecated.')
2854 
2855  if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line):
2856  # TODO(unknown): Could it be expanded safely to arbitrary references,
2857  # without triggering too many false positives? The first
2858  # attempt triggered 5 warnings for mostly benign code in the regtest, hence
2859  # the restriction.
2860  # Here's the original regexp, for the reference:
2861  # type_name = r'\w+((\s*::\s*\w+)|(\s*<\s*\w+?\s*>))?'
2862  # r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;'
2863  error(filename, linenum, 'runtime/member_string_references', 2,
2864  'const string& members are dangerous. It is much better to use '
2865  'alternatives, such as pointers or simple constants.')
2866 
2867  # Everything else in this function operates on class declarations.
2868  # Return early if the top of the nesting stack is not a class, or if
2869  # the class head is not completed yet.
2870  classinfo = nesting_state.InnermostClass()
2871  if not classinfo or not classinfo.seen_open_brace:
2872  return
2873 
2874  # The class may have been declared with namespace or classname qualifiers.
2875  # The constructor and destructor will not have those qualifiers.
2876  base_classname = classinfo.name.split('::')[-1]
2877 
2878  # Look for single-argument constructors that aren't marked explicit.
2879  # Technically a valid construct, but against style.
2880  explicit_constructor_match = Match(
2881  r'\s+(?:(?:inline|constexpr)\s+)*(explicit\s+)?'
2882  r'(?:(?:inline|constexpr)\s+)*%s\s*'
2883  r'\(((?:[^()]|\([^()]*\))*)\)'
2884  % re.escape(base_classname),
2885  line)
2886 
2887  if explicit_constructor_match:
2888  is_marked_explicit = explicit_constructor_match.group(1)
2889 
2890  if not explicit_constructor_match.group(2):
2891  constructor_args = []
2892  else:
2893  constructor_args = explicit_constructor_match.group(2).split(',')
2894 
2895  # collapse arguments so that commas in template parameter lists and function
2896  # argument parameter lists don't split arguments in two
2897  i = 0
2898  while i < len(constructor_args):
2899  constructor_arg = constructor_args[i]
2900  while (constructor_arg.count('<') > constructor_arg.count('>') or
2901  constructor_arg.count('(') > constructor_arg.count(')')):
2902  constructor_arg += ',' + constructor_args[i + 1]
2903  del constructor_args[i + 1]
2904  constructor_args[i] = constructor_arg
2905  i += 1
2906 
2907  defaulted_args = [arg for arg in constructor_args if '=' in arg]
2908  noarg_constructor = (not constructor_args or # empty arg list
2909  # 'void' arg specifier
2910  (len(constructor_args) == 1 and
2911  constructor_args[0].strip() == 'void'))
2912  onearg_constructor = ((len(constructor_args) == 1 and # exactly one arg
2913  not noarg_constructor) or
2914  # all but at most one arg defaulted
2915  (len(constructor_args) >= 1 and
2916  not noarg_constructor and
2917  len(defaulted_args) >= len(constructor_args) - 1))
2918  initializer_list_constructor = bool(
2919  onearg_constructor and
2920  Search(r'\bstd\s*::\s*initializer_list\b', constructor_args[0]))
2921  copy_constructor = bool(
2922  onearg_constructor and
2923  Match(r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&'
2924  % re.escape(base_classname), constructor_args[0].strip()))
2925 
2926  if (not is_marked_explicit and
2927  onearg_constructor and
2928  not initializer_list_constructor and
2929  not copy_constructor):
2930  if defaulted_args:
2931  error(filename, linenum, 'runtime/explicit', 5,
2932  'Constructors callable with one argument '
2933  'should be marked explicit.')
2934  else:
2935  error(filename, linenum, 'runtime/explicit', 5,
2936  'Single-parameter constructors should be marked explicit.')
2937  elif is_marked_explicit and not onearg_constructor:
2938  if noarg_constructor:
2939  error(filename, linenum, 'runtime/explicit', 5,
2940  'Zero-parameter constructors should not be marked explicit.')
2941 
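# Illustrative sketch (hypothetical class Foo): inside the body of class
# Foo, the explicit-constructor logic above behaves like this:
#   Foo(int x);                          // flagged: runtime/explicit
#   explicit Foo(int x);                 // ok
#   Foo(const Foo& other);               // ok: copy constructor is exempt
#   Foo(std::initializer_list<int> v);   // ok: initializer_list is exempt
#   explicit Foo();                      // flagged: zero-parameter explicit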
2942 
2943 def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error):
2944  """Checks for the correctness of various spacing around function calls.
2945 
2946  Args:
2947  filename: The name of the current file.
2948  clean_lines: A CleansedLines instance containing the file.
2949  linenum: The number of the line to check.
2950  error: The function to call with any errors found.
2951  """
2952  line = clean_lines.elided[linenum]
2953 
2954  # Since function calls often occur inside if/for/while/switch
2955  # expressions - which have their own, more liberal conventions - we
2956  # first see if we should be looking inside such an expression for a
2957  # function call, to which we can apply more strict standards.
2958  fncall = line # if there's no control flow construct, look at whole line
2959  for pattern in (r'\bif\s*\((.*)\)\s*{',
2960  r'\bfor\s*\((.*)\)\s*{',
2961  r'\bwhile\s*\((.*)\)\s*[{;]',
2962  r'\bswitch\s*\((.*)\)\s*{'):
2963  match = Search(pattern, line)
2964  if match:
2965  fncall = match.group(1) # look inside the parens for function calls
2966  break
2967 
2968  # Except in if/for/while/switch, there should never be space
2969  # immediately inside parens (eg "f( 3, 4 )"). We make an exception
2970  # for nested parens ( (a+b) + c ). Likewise, there should never be
2971  # a space before a ( when it's a function argument. I assume it's a
2972  # function argument when the char before the whitespace is legal in
2973  # a function name (alnum + _) and we're not starting a macro. Also ignore
2974  # pointers and references to arrays and functions because they're too tricky:
2975  # we use a very simple way to recognize these:
2976  # " (something)(maybe-something)" or
2977  # " (something)(maybe-something," or
2978  # " (something)[something]"
2979  # Note that we assume the contents of [] to be short enough that
2980  # they'll never need to wrap.
2981  if ( # Ignore control structures.
2982  not Search(r'\b(if|for|while|switch|return|new|delete|catch|sizeof)\b',
2983  fncall) and
2984  # Ignore pointers/references to functions.
2985  not Search(r' \([^)]+\)\([^)]*(\)|,$)', fncall) and
2986  # Ignore pointers/references to arrays.
2987  not Search(r' \([^)]+\)\[[^\]]+\]', fncall)):
2988  if Search(r'\w\s*\(\s(?!\s*\\$)', fncall): # a ( used for a fn call
2989  error(filename, linenum, 'whitespace/parens', 4,
2990  'Extra space after ( in function call')
2991  elif Search(r'\(\s+(?!(\s*\\)|\()', fncall):
2992  error(filename, linenum, 'whitespace/parens', 2,
2993  'Extra space after (')
2994  if (Search(r'\w\s+\(', fncall) and
2995  not Search(r'_{0,2}asm_{0,2}\s+_{0,2}volatile_{0,2}\s+\(', fncall) and
2996  not Search(r'#\s*define|typedef|using\s+\w+\s*=', fncall) and
2997  not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall) and
2998  not Search(r'\bcase\s+\(', fncall)):
2999  # TODO(unknown): Space after an operator function seems to be a common
3000  # error, silence those for now by restricting them to highest verbosity.
3001  if Search(r'\boperator_*\b', line):
3002  error(filename, linenum, 'whitespace/parens', 0,
3003  'Extra space before ( in function call')
3004  else:
3005  error(filename, linenum, 'whitespace/parens', 4,
3006  'Extra space before ( in function call')
3007  # If the ) is followed only by a newline or a { + newline, assume it's
3008  # part of a control statement (if/while/etc), and don't complain
3009  if Search(r'[^)]\s+\)\s*[^{\s]', fncall):
3010  # If the closing parenthesis is preceded by only whitespaces,
3011  # try to give a more descriptive error message.
3012  if Search(r'^\s+\)', fncall):
3013  error(filename, linenum, 'whitespace/parens', 2,
3014  'Closing ) should be moved to the previous line')
3015  else:
3016  error(filename, linenum, 'whitespace/parens', 2,
3017  'Extra space before )')
3018 
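# Illustrative sketch (hypothetical lines): typical verdicts of the
# function-call spacing rules above:
#   DoSomething( x );   // "Extra space after (" and "Extra space before )"
#   DoSomething (x);    // "Extra space before ( in function call"
#   if (x) { ... }      // control-flow parens are exempt; only the text
#                       // inside the parens is inspected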
3019 
3020 def IsBlankLine(line):
3021  """Returns true if the given line is blank.
3022 
3023  We consider a line to be blank if the line is empty or consists of
3024  only white spaces.
3025 
3026  Args:
3027  line: A line of a string.
3028 
3029  Returns:
3030  True, if the given line is blank.
3031  """
3032  return not line or line.isspace()
3033 
3034 
3035 def CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line,
3036  error):
3037  is_namespace_indent_item = (
3038  len(nesting_state.stack) > 1 and
3039  nesting_state.stack[-1].check_namespace_indentation and
3040  isinstance(nesting_state.previous_stack_top, _NamespaceInfo) and
3041  nesting_state.previous_stack_top == nesting_state.stack[-2])
3042 
3043  if ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item,
3044  clean_lines.elided, line):
3045  CheckItemIndentationInNamespace(filename, clean_lines.elided,
3046  line, error)
3047 
3048 
3049 def CheckForFunctionLengths(filename, clean_lines, linenum,
3050  function_state, error):
3051  """Reports errors for long function bodies.
3052 
3053  For an overview why this is done, see:
3054  https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Write_Short_Functions
3055 
3056  Uses a simplistic algorithm assuming other style guidelines
3057  (especially spacing) are followed.
3058  Only checks unindented functions, so class members are unchecked.
3059  Trivial bodies are unchecked, so constructors with huge initializer lists
3060  may be missed.
3061  Blank/comment lines are not counted so as to avoid encouraging the removal
3062  of vertical space and comments just to get through a lint check.
3063  NOLINT *on the last line of a function* disables this check.
3064 
3065  Args:
3066  filename: The name of the current file.
3067  clean_lines: A CleansedLines instance containing the file.
3068  linenum: The number of the line to check.
3069  function_state: Current function name and lines in body so far.
3070  error: The function to call with any errors found.
3071  """
3072  lines = clean_lines.lines
3073  line = lines[linenum]
3074  joined_line = ''
3075 
3076  starting_func = False
3077  regexp = r'(\w(\w|::|\*|\&|\s)*)\(' # decls * & space::name( ...
3078  match_result = Match(regexp, line)
3079  if match_result:
3080  # If the name is all caps and underscores, figure it's a macro and
3081  # ignore it, unless it's TEST or TEST_F.
3082  function_name = match_result.group(1).split()[-1]
3083  if function_name == 'TEST' or function_name == 'TEST_F' or (
3084  not Match(r'[A-Z_]+$', function_name)):
3085  starting_func = True
3086 
3087  if starting_func:
3088  body_found = False
3089  for start_linenum in xrange(linenum, clean_lines.NumLines()):
3090  start_line = lines[start_linenum]
3091  joined_line += ' ' + start_line.lstrip()
3092  if Search(r'(;|})', start_line): # Declarations and trivial functions
3093  body_found = True
3094  break # ... ignore
3095  elif Search(r'{', start_line):
3096  body_found = True
3097  function = Search(r'((\w|:)*)\(', line).group(1)
3098  if Match(r'TEST', function): # Handle TEST... macros
3099  parameter_regexp = Search(r'(\(.*\))', joined_line)
3100  if parameter_regexp: # Ignore bad syntax
3101  function += parameter_regexp.group(1)
3102  else:
3103  function += '()'
3104  function_state.Begin(function)
3105  break
3106  if not body_found:
3107  # No body for the function (or evidence of a non-function) was found.
3108  error(filename, linenum, 'readability/fn_size', 5,
3109  'Lint failed to find start of function body.')
3110  elif Match(r'^\}\s*$', line): # function end
3111  function_state.Check(error, filename, linenum)
3112  function_state.End()
3113  elif not Match(r'^\s*$', line):
3114  function_state.Count() # Count non-blank/non-comment lines.
3115 
3116 
3117 _RE_PATTERN_TODO = re.compile(r'^//(\s*)TODO(\(.+?\))?:?(\s|$)?')
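# For example (illustrative): matched against '// TODO(alice): Refactor.',
# group(1) is the single space before TODO, group(2) is '(alice)', and
# group(3) is the space after the colon.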
3118 
3119 
3120 def CheckComment(line, filename, linenum, next_line_start, error):
3121  """Checks for common mistakes in comments.
3122 
3123  Args:
3124  line: The line in question.
3125  filename: The name of the current file.
3126  linenum: The number of the line to check.
3127  next_line_start: The first non-whitespace column of the next line.
3128  error: The function to call with any errors found.
3129  """
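  # For example (illustrative):
  #   int x = GetX();  // ok: two spaces between code and comment
  #   int y = GetY(); // flagged: 'At least two spaces is best ...'
  #   // TODO: fix this       <- flagged: 'Missing username in TODO ...'
  #   //    TODO(alice): fix  <- flagged: 'Too many spaces before TODO'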
3130  commentpos = line.find('//')
3131  if commentpos != -1:
3132  # Check if the // may be in quotes. If so, ignore it
3133  if re.sub(r'\\.', '', line[0:commentpos]).count('"') % 2 == 0:
3134  # Allow one space for new scopes, two spaces otherwise:
3135  if (not (Match(r'^.*{ *//', line) and next_line_start == commentpos) and
3136  ((commentpos >= 1 and
3137  line[commentpos-1] not in string.whitespace) or
3138  (commentpos >= 2 and
3139  line[commentpos-2] not in string.whitespace))):
3140  error(filename, linenum, 'whitespace/comments', 2,
3141  'At least two spaces is best between code and comments')
3142 
3143  # Checks for common mistakes in TODO comments.
3144  comment = line[commentpos:]
3145  match = _RE_PATTERN_TODO.match(comment)
3146  if match:
3147  # One whitespace is correct; zero whitespace is handled elsewhere.
3148  leading_whitespace = match.group(1)
3149  if len(leading_whitespace) > 1:
3150  error(filename, linenum, 'whitespace/todo', 2,
3151  'Too many spaces before TODO')
3152 
3153  username = match.group(2)
3154  if not username:
3155  error(filename, linenum, 'readability/todo', 2,
3156  'Missing username in TODO; it should look like '
3157  '"// TODO(my_username): Stuff."')
3158 
3159  middle_whitespace = match.group(3)
3160  # Comparisons made explicit for correctness -- pylint: disable=g-explicit-bool-comparison
3161  if middle_whitespace != ' ' and middle_whitespace != '':
3162  error(filename, linenum, 'whitespace/todo', 2,
3163  'TODO(my_username) should be followed by a space')
3164 
3165  # If the comment contains an alphanumeric character, there
3166  # should be a space somewhere between it and the // unless
3167  # it's a /// or //! Doxygen comment.
3168  if (Match(r'//[^ ]*\w', comment) and
3169  not Match(r'(///|//\!)(\s+|$)', comment)):
3170  error(filename, linenum, 'whitespace/comments', 4,
3171  'Should have a space between // and comment')
3172 
3173 
3174 def CheckSpacing(filename, clean_lines, linenum, nesting_state, error):
3175  """Checks for the correctness of various spacing issues in the code.
3176 
3177  Things we check for: spaces around operators, spaces after
3178  if/for/while/switch, no spaces around parens in function calls, two
3179  spaces between code and comment, don't start a block with a blank
3180  line, don't end a function with a blank line, don't add a blank line
3181  after public/protected/private, don't have too many blank lines in a row.
3182 
3183  Args:
3184  filename: The name of the current file.
3185  clean_lines: A CleansedLines instance containing the file.
3186  linenum: The number of the line to check.
3187  nesting_state: A NestingState instance which maintains information about
3188  the current stack of nested blocks being parsed.
3189  error: The function to call with any errors found.
3190  """
3191 
3192  # Don't use "elided" lines here, otherwise we can't check commented lines.
3193  # Don't want to use "raw" either, because we don't want to check inside C++11
3194  # raw strings.
3195  raw = clean_lines.lines_without_raw_strings
3196  line = raw[linenum]
3197 
3198  # Before nixing comments, check if the line is blank for no good
3199  # reason. This includes the first line after a block is opened, and
3200  # blank lines at the end of a function (i.e., right before a line like '}').
3201  #
3202  # Skip all the blank line checks if we are immediately inside a
3203  # namespace body. In other words, don't issue blank line warnings
3204  # for this block:
3205  # namespace {
3206  #
3207  # }
3208  #
3209  # A warning about missing end of namespace comments will be issued instead.
3210  #
3211  # Also skip blank line checks for 'extern "C"' blocks, which are formatted
3212  # like namespaces.
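  # For example (illustrative), the blank line here is flagged as a
  # redundant blank line at the start of a code block:
  #   void Foo() {
  #
  #     DoSomething();
  #   }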
3213  if (IsBlankLine(line) and
3214  not nesting_state.InNamespaceBody() and
3215  not nesting_state.InExternC()):
3216  elided = clean_lines.elided
3217  prev_line = elided[linenum - 1]
3218  prevbrace = prev_line.rfind('{')
3219  # TODO(unknown): Don't complain if line before blank line, and line after,
3220  # both start with alnums and are indented the same amount.
3221  # This ignores whitespace at the start of a namespace block
3222  # because those are not usually indented.
3223  if prevbrace != -1 and prev_line[prevbrace:].find('}') == -1:
3224  # OK, we have a blank line at the start of a code block. Before we
3225  # complain, we check if it is an exception to the rule: The previous
3226  # non-empty line has the parameters of a function header that are indented
3227  # 4 spaces (because they did not fit in an 80 column line when placed on
3228  # the same line as the function name). We also check for the case where
3229  # the previous line is indented 6 spaces, which may happen when the
3230  # initializers of a constructor do not fit into an 80 column line.
3231  exception = False
3232  if Match(r' {6}\w', prev_line): # Initializer list?
3233  # We are looking for the opening column of initializer list, which
3234  # should be indented 4 spaces to cause 6 space indentation afterwards.
3235  search_position = linenum-2
3236  while (search_position >= 0
3237  and Match(r' {6}\w', elided[search_position])):
3238  search_position -= 1
3239  exception = (search_position >= 0
3240  and elided[search_position][:5] == ' :')
3241  else:
3242  # Search for the function arguments or an initializer list. We use a
3243  # simple heuristic here: if the line is indented 4 spaces and we have a
3244  # closing paren (without the opening paren), followed by an opening brace
3245  # or colon (for initializer lists), we assume that it is the last line of
3246  # a function header. If we have a colon indented 4 spaces, it is an
3247  # initializer list.
3248  exception = (Match(r' {4}\w[^\(]*\)\s*(const\s*)?(\{\s*$|:)',
3249  prev_line)
3250  or Match(r' {4}:', prev_line))
3251 
3252  if not exception:
3253  error(filename, linenum, 'whitespace/blank_line', 2,
3254  'Redundant blank line at the start of a code block '
3255  'should be deleted.')
3256  # Ignore blank lines at the end of a block in a long if-else
3257  # chain, like this:
3258  # if (condition1) {
3259  # // Something followed by a blank line
3260  #
3261  # } else if (condition2) {
3262  # // Something else
3263  # }
3264  if linenum + 1 < clean_lines.NumLines():
3265  next_line = raw[linenum + 1]
3266  if (next_line
3267  and Match(r'\s*}', next_line)
3268  and next_line.find('} else ') == -1):
3269  error(filename, linenum, 'whitespace/blank_line', 3,
3270  'Redundant blank line at the end of a code block '
3271  'should be deleted.')
3272 
3273  matched = Match(r'\s*(public|protected|private):', prev_line)
3274  if matched:
3275  error(filename, linenum, 'whitespace/blank_line', 3,
3276  'Do not leave a blank line after "%s:"' % matched.group(1))
3277 
3278  # Next, check comments
3279  next_line_start = 0
3280  if linenum + 1 < clean_lines.NumLines():
3281  next_line = raw[linenum + 1]
3282  next_line_start = len(next_line) - len(next_line.lstrip())
3283  CheckComment(line, filename, linenum, next_line_start, error)
3284 
3285  # get rid of comments and strings
3286  line = clean_lines.elided[linenum]
3287 
3288  # You shouldn't have spaces before your brackets, except maybe after
3289  # 'delete []', 'return []() {};', or 'auto [abc, ...] = ...;'.
3290  if Search(r'\w\s+\[', line) and not Search(r'(?:auto&?|delete|return)\s+\[', line):
3291  error(filename, linenum, 'whitespace/braces', 5,
3292  'Extra space before [')
3293 
3294  # In range-based for, we want spaces before and after the colon, but
3295  # not around "::" tokens that might appear.
3296  if (Search(r'for *\(.*[^:]:[^: ]', line) or
3297  Search(r'for *\(.*[^: ]:[^:]', line)):
3298  error(filename, linenum, 'whitespace/forcolon', 2,
3299  'Missing space around colon in range-based for loop')
3300 
3301 
3302 def CheckOperatorSpacing(filename, clean_lines, linenum, error):
3303  """Checks for horizontal spacing around operators.
3304 
3305  Args:
3306  filename: The name of the current file.
3307  clean_lines: A CleansedLines instance containing the file.
3308  linenum: The number of the line to check.
3309  error: The function to call with any errors found.
3310  """
3311  line = clean_lines.elided[linenum]
3312 
3313  # Don't try to do spacing checks for operator methods. Do this by
3314  # replacing the troublesome characters with something else,
3315  # preserving column position for all other characters.
3316  #
3317  # The replacement is done repeatedly to avoid false positives from
3318  # operators that call operators.
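  # For example (illustrative): "bool operator==(const Foo& a, const Foo& b)"
  # becomes "bool operator__(const Foo& a, const Foo& b)", so the "==" in the
  # declaration itself is not flagged by the spacing checks below.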
3319  while True:
3320  match = Match(r'^(.*\boperator\b)(\S+)(\s*\(.*)$', line)
3321  if match:
3322  line = match.group(1) + ('_' * len(match.group(2))) + match.group(3)
3323  else:
3324  break
3325 
3326  # We allow no-spaces around = within an if: "if ( (a=Foo()) == 0 )".
3327  # Otherwise not. Note we only check for non-spaces on *both* sides;
3328  # sometimes people put non-spaces on one side when aligning ='s among
3329  # many lines (not that this is behavior that I approve of...)
3330  if ((Search(r'[\w.]=', line) or
3331  Search(r'=[\w.]', line))
3332  and not Search(r'\b(if|while|for) ', line)
3333  # Operators taken from [lex.operators] in C++11 standard.
3334  and not Search(r'(>=|<=|==|!=|&=|\^=|\|=|\+=|\*=|\/=|\%=)', line)
3335  and not Search(r'operator=', line)):
3336  error(filename, linenum, 'whitespace/operators', 4,
3337  'Missing spaces around =')
3338 
3339  # It's ok not to have spaces around binary operators like + - * /, but if
3340  # there's too little whitespace, we get concerned. It's hard to tell,
3341  # though, so we punt on this one for now. TODO.
3342 
3343  # You should always have whitespace around binary operators.
3344  #
3345  # Check <= and >= first to avoid false positives with < and >, then
3346  # check non-include lines for spacing around < and >.
3347  #
3348  # If the operator is followed by a comma, assume it is being used in a
3349  # macro context and don't do any checks. This avoids false
3350  # positives.
3351  #
3352  # Note that && is not included here. This is because there are too
3353  # many false positives due to RValue references.
3354  match = Search(r'[^<>=!\s](==|!=|<=|>=|\|\|)[^<>=!\s,;\)]', line)
3355  if match:
3356  error(filename, linenum, 'whitespace/operators', 3,
3357  'Missing spaces around %s' % match.group(1))
3358  elif not Match(r'#.*include', line):
3359  # Look for < that is not surrounded by spaces. This is only
3360  # triggered if both sides are missing spaces, even though
3361  # technically we should flag if at least one side is missing a
3362  # space. This is done to avoid some false positives with shifts.
3363  match = Match(r'^(.*[^\s<])<[^\s=<,]', line)
3364  if match:
3365  (_, _, end_pos) = CloseExpression(
3366  clean_lines, linenum, len(match.group(1)))
3367  if end_pos <= -1:
3368  error(filename, linenum, 'whitespace/operators', 3,
3369  'Missing spaces around <')
3370 
3371  # Look for > that is not surrounded by spaces. Similar to the
3372  # above, we only trigger if both sides are missing spaces to avoid
3373  # false positives with shifts.
3374  match = Match(r'^(.*[^-\s>])>[^\s=>,]', line)
3375  if match:
3376  (_, _, start_pos) = ReverseCloseExpression(
3377  clean_lines, linenum, len(match.group(1)))
3378  if start_pos <= -1:
3379  error(filename, linenum, 'whitespace/operators', 3,
3380  'Missing spaces around >')
3381 
3382  # We allow no-spaces around << when used like this: 10<<20, but
3383  # not otherwise (particularly, not when used as streams)
3384  #
3385  # We also allow operators following an opening parenthesis, since
3386  # those tend to be macros that deal with operators.
3387  match = Search(r'(operator|[^\s(<])(?:L|UL|LL|ULL|l|ul|ll|ull)?<<([^\s,=<])', line)
3388  if (match and not (match.group(1).isdigit() and match.group(2).isdigit()) and
3389  not (match.group(1) == 'operator' and match.group(2) == ';')):
3390  error(filename, linenum, 'whitespace/operators', 3,
3391  'Missing spaces around <<')
3392 
3393  # We allow no-spaces around >> for almost anything. This is because
3394  # C++11 allows ">>" to close nested templates, which accounts for
3395  # most cases when ">>" is not followed by a space.
3396  #
3397  # We still warn on ">>" followed by alpha character, because that is
3398  # likely due to ">>" being used for right shifts, e.g.:
3399  # value >> alpha
3400  #
3401  # When ">>" is used to close templates, the alphanumeric letter that
3402  # follows would be part of an identifier, and there should still be
3403  # a space separating the template type and the identifier.
3404  # type<type<type>> alpha
3405  match = Search(r'>>[a-zA-Z_]', line)
3406  if match:
3407  error(filename, linenum, 'whitespace/operators', 3,
3408  'Missing spaces around >>')
3409 
3410  # There shouldn't be space around unary operators
3411  match = Search(r'(!\s|~\s|[\s]--[\s;]|[\s]\+\+[\s;])', line)
3412  if match:
3413  error(filename, linenum, 'whitespace/operators', 4,
3414  'Extra space for operator %s' % match.group(1))
3415 
3416 
3417 def CheckParenthesisSpacing(filename, clean_lines, linenum, error):
3418  """Checks for horizontal spacing around parentheses.
3419 
3420  Args:
3421  filename: The name of the current file.
3422  clean_lines: A CleansedLines instance containing the file.
3423  linenum: The number of the line to check.
3424  error: The function to call with any errors found.
3425  """
3426  line = clean_lines.elided[linenum]
3427 
3428  # Flag a missing space between if/for/while/switch and the opening paren.
3429  match = Search(r' (if\(|for\(|while\(|switch\()', line)
3430  if match:
3431  error(filename, linenum, 'whitespace/parens', 5,
3432  'Missing space before ( in %s' % match.group(1))
3433 
3434  # For if/for/while/switch, the left and right parens should be
3435  # consistent about how many spaces are inside the parens, and
3436  # there should either be zero or one spaces inside the parens.
3437  # We don't want: "if ( foo)" or "if ( foo )".
3438  # Exception: "for ( ; foo; bar)" and "for (foo; bar; )" are allowed.
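  # For example (illustrative), the checks in this function flag:
  #   if(x) {          <- 'Missing space before ( in if('
  #   if ( foo) {      <- 'Mismatching spaces inside () in if'
  #   if (  foo  ) {   <- 'Should have zero or one spaces inside ( and ) in if'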
3439  match = Search(r'\b(if|for|while|switch)\s*'
3440  r'\(([ ]*)(.).*[^ ]+([ ]*)\)\s*{\s*$',
3441  line)
3442  if match:
3443  if len(match.group(2)) != len(match.group(4)):
3444  if not (match.group(3) == ';' and
3445  len(match.group(2)) == 1 + len(match.group(4)) or
3446  not match.group(2) and Search(r'\bfor\s*\(.*; \)', line)):
3447  error(filename, linenum, 'whitespace/parens', 5,
3448  'Mismatching spaces inside () in %s' % match.group(1))
3449  if len(match.group(2)) not in [0, 1]:
3450  error(filename, linenum, 'whitespace/parens', 5,
3451  'Should have zero or one spaces inside ( and ) in %s' %
3452  match.group(1))
3453 
3454 
3455 def CheckCommaSpacing(filename, clean_lines, linenum, error):
3456  """Checks for horizontal spacing near commas and semicolons.
3457 
3458  Args:
3459  filename: The name of the current file.
3460  clean_lines: A CleansedLines instance containing the file.
3461  linenum: The number of the line to check.
3462  error: The function to call with any errors found.
3463  """
3464  raw = clean_lines.lines_without_raw_strings
3465  line = clean_lines.elided[linenum]
3466 
3467  # You should always have a space after a comma (either as fn arg or operator)
3468  #
3469  # This does not apply when the non-space character following the
3470  # comma is another comma, since the only time when that happens is
3471  # for empty macro arguments.
3472  #
3473  # We run this check in two passes: a first pass on elided lines to
3474  # find commas that appear to be missing a following space, and a second
3475  # pass on raw lines to confirm that the missing space is not merely an
3476  # artifact of an elided comment.
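  # For example (illustrative):
  #   DoSomething(a,b);           <- 'Missing space after ,'
  #   for (int i = 0;i < n; ++i)  <- 'Missing space after ;'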
3477  if (Search(r',[^,\s]', ReplaceAll(r'\boperator\s*,\s*\(', 'F(', line)) and
3478  Search(r',[^,\s]', raw[linenum])):
3479  error(filename, linenum, 'whitespace/comma', 3,
3480  'Missing space after ,')
3481 
3482  # You should always have a space after a semicolon
3483  # except for a few corner cases
3484  # TODO(unknown): clarify if 'if (1) { return 1;}' requires one more
3485  # space after ;
3486  if Search(r';[^\s};\\)/]', line):
3487  error(filename, linenum, 'whitespace/semicolon', 3,
3488  'Missing space after ;')
3489 
3490 
3491 def _IsType(clean_lines, nesting_state, expr):
3492  """Check if expression looks like a type name, returns true if so.
3493 
3494  Args:
3495  clean_lines: A CleansedLines instance containing the file.
3496  nesting_state: A NestingState instance which maintains information about
3497  the current stack of nested blocks being parsed.
3498  expr: The expression to check.
3499  Returns:
3500  True, if token looks like a type.
3501  """
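  # For example (illustrative): inside a block declared as
  #   template <typename Alloc>
  #   class Pool { ... };
  # the token "Alloc" matches the typename pattern built below and is
  # therefore treated as a type.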
3502  # Keep only the last token in the expression
3503  last_word = Match(r'^.*(\b\S+)$', expr)
3504  if last_word:
3505  token = last_word.group(1)
3506  else:
3507  token = expr
3508 
3509  # Match native types and stdint types
3510  if _TYPES.match(token):
3511  return True
3512 
3513  # Try a bit harder to match templated types. Walk up the nesting
3514  # stack until we find something that resembles a typename
3515  # declaration for what we are looking for.
3516  typename_pattern = (r'\b(?:typename|class|struct)\s+' + re.escape(token) +
3517  r'\b')
3518  block_index = len(nesting_state.stack) - 1
3519  while block_index >= 0:
3520  if isinstance(nesting_state.stack[block_index], _NamespaceInfo):
3521  return False
3522 
3523  # Found where the opening brace is. We want to scan from this
3524  # line up to the beginning of the function, minus a few lines.
3525  # template <typename Type1, // stop scanning here
3526  # ...>
3527  # class C
3528  # : public ... { // start scanning here
3529  last_line = nesting_state.stack[block_index].starting_linenum
3530 
3531  next_block_start = 0
3532  if block_index > 0:
3533  next_block_start = nesting_state.stack[block_index - 1].starting_linenum
3534  first_line = last_line
3535  while first_line >= next_block_start:
3536  if clean_lines.elided[first_line].find('template') >= 0:
3537  break
3538  first_line -= 1
3539  if first_line < next_block_start:
3540  # Didn't find any "template" keyword before reaching the next block,
3541  # there are probably no template things to check for this block
3542  block_index -= 1
3543  continue
3544 
3545  # Look for typename in the specified range
3546  for i in xrange(first_line, last_line + 1, 1):
3547  if Search(typename_pattern, clean_lines.elided[i]):
3548  return True
3549  block_index -= 1
3550 
3551  return False
3552 
3553 
3554 def CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error):
3555  """Checks for horizontal spacing near braces and semicolons.
3556 
3557  Args:
3558  filename: The name of the current file.
3559  clean_lines: A CleansedLines instance containing the file.
3560  linenum: The number of the line to check.
3561  nesting_state: A NestingState instance which maintains information about
3562  the current stack of nested blocks being parsed.
3563  error: The function to call with any errors found.
3564  """
3565  line = clean_lines.elided[linenum]
3566 
3567  # Except after an opening paren, or after another opening brace (in case of
3568  # an initializer list, for instance), you should have spaces before your
3569  # braces when they are delimiting blocks, classes, namespaces etc.
3570  # And since you should never have braces at the beginning of a line,
3571  # this is an easy test. Except that braces used for initialization don't
3572  # follow the same rule; we often don't want spaces before those.
3573  match = Match(r'^(.*[^ ({>]){', line)
3574 
3575  if match:
3576  # Try a bit harder to check for brace initialization. This
3577  # happens in one of the following forms:
3578  # Constructor() : initializer_list_{} { ... }
3579  # Constructor{}.MemberFunction()
3580  # Type variable{};
3581  # FunctionCall(type{}, ...);
3582  # LastArgument(..., type{});
3583  # LOG(INFO) << type{} << " ...";
3584  # map_of_type[{...}] = ...;
3585  # ternary = expr ? new type{} : nullptr;
3586  # OuterTemplate<InnerTemplateConstructor<Type>{}>
3587  #
3588  # We check for the character following the closing brace, and
3589  # silence the warning if it's one of those listed above, i.e.
3590  # "{.;,)<>]:".
3591  #
3592  # To account for nested initializer list, we allow any number of
3593  # closing braces up to "{;,)<". We can't simply silence the
3594  # warning on first sight of closing brace, because that would
3595  # cause false negatives for things that are not initializer lists.
3596  # Silence this: But not this:
3597  # Outer{ if (...) {
3598  # Inner{...} if (...){ // Missing space before {
3599  # }; }
3600  #
3601  # There is a false negative with this approach if people inserted
3602  # spurious semicolons, e.g. "if (cond){};", but we will catch the
3603  # spurious semicolon with a separate check.
3604  leading_text = match.group(1)
3605  (endline, endlinenum, endpos) = CloseExpression(
3606  clean_lines, linenum, len(match.group(1)))
3607  trailing_text = ''
3608  if endpos > -1:
3609  trailing_text = endline[endpos:]
3610  for offset in xrange(endlinenum + 1,
3611  min(endlinenum + 3, clean_lines.NumLines() - 1)):
3612  trailing_text += clean_lines.elided[offset]
3613  # We also suppress warnings for `uint64_t{expression}` etc., as the style
3614  # guide recommends brace initialization for integral types to avoid
3615  # overflow/truncation.
3616  if (not Match(r'^[\s}]*[{.;,)<>\]:]', trailing_text)
3617  and not _IsType(clean_lines, nesting_state, leading_text)):
3618  error(filename, linenum, 'whitespace/braces', 5,
3619  'Missing space before {')
3620 
3621  # Make sure '} else {' has spaces.
3622  if Search(r'}else', line):
3623  error(filename, linenum, 'whitespace/braces', 5,
3624  'Missing space before else')
3625 
3626  # You shouldn't have a space before a semicolon at the end of the line.
3627  # There's a special case for "for" since the style guide allows space before
3628  # the semicolon there.
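  # For example (illustrative):
  #   default: ;       <- 'Semicolon defining empty statement. Use {} instead.'
  #   ;                <- 'Line contains only semicolon. ...'
  #   x = Compute() ;  <- 'Extra space before last semicolon. ...'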
3629  if Search(r':\s*;\s*$', line):
3630  error(filename, linenum, 'whitespace/semicolon', 5,
3631  'Semicolon defining empty statement. Use {} instead.')
3632  elif Search(r'^\s*;\s*$', line):
3633  error(filename, linenum, 'whitespace/semicolon', 5,
3634  'Line contains only semicolon. If this should be an empty statement, '
3635  'use {} instead.')
3636  elif (Search(r'\s+;\s*$', line) and
3637  not Search(r'\bfor\b', line)):
3638  error(filename, linenum, 'whitespace/semicolon', 5,
3639  'Extra space before last semicolon. If this should be an empty '
3640  'statement, use {} instead.')
3641 
3642 
3643 def IsDecltype(clean_lines, linenum, column):
3644  """Check if the token ending on (linenum, column) is decltype().
3645 
3646  Args:
3647  clean_lines: A CleansedLines instance containing the file.
3648  linenum: the number of the line to check.
3649  column: end column of the token to check.
3650  Returns:
3651  True if this token is a decltype() expression, False otherwise.
3652  """
3653  (text, _, start_col) = ReverseCloseExpression(clean_lines, linenum, column)
3654  if start_col < 0:
3655  return False
3656  if Search(r'\bdecltype\s*$', text[0:start_col]):
3657  return True
3658  return False
3659 
3660 
3661 def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error):
3662  """Checks for additional blank line issues related to sections.
3663 
3664  Currently the only thing checked here is a blank line before protected/private.
3665 
3666  Args:
3667  filename: The name of the current file.
3668  clean_lines: A CleansedLines instance containing the file.
3669  class_info: A _ClassInfo instance.
3670  linenum: The number of the line to check.
3671  error: The function to call with any errors found.
3672  """
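  # For example (illustrative), inside a sufficiently large class body:
  #   void Method();
  #  private:          <- '"private:" should be preceded by a blank line'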
3673  # Skip checks if the class is small, where small means 25 lines or less.
3674  # 25 lines seems like a good cutoff since that's the usual height of
3675  # terminals, and any class that can't fit in one screen can't really
3676  # be considered "small".
3677  #
3678  # Also skip checks if we are on the first line. This accounts for
3679  # classes that look like
3680  # class Foo { public: ... };
3681  #
3682  # If we didn't find the end of the class, last_line would be zero,
3683  # and the check will be skipped by the first condition.
3684  if (class_info.last_line - class_info.starting_linenum <= 24 or
3685  linenum <= class_info.starting_linenum):
3686  return
3687 
3688  matched = Match(r'\s*(public|protected|private):', clean_lines.lines[linenum])
3689  if matched:
3690  # Issue warning if the line before public/protected/private was
3691  # not a blank line, but don't do this if the previous line contains
3692  # "class" or "struct". This can happen two ways:
3693  # - We are at the beginning of the class.
3694  # - We are forward-declaring an inner class that is semantically
3695  # private, but needed to be public for implementation reasons.
3696  # Also ignores cases where the previous line ends with a backslash as can be
3697  # common when defining classes in C macros.
3698  prev_line = clean_lines.lines[linenum - 1]
3699  if (not IsBlankLine(prev_line) and
3700  not Search(r'\b(class|struct)\b', prev_line) and
3701  not Search(r'\\$', prev_line)):
3702  # Try a bit harder to find the beginning of the class. This is to
3703  # account for multi-line base-specifier lists, e.g.:
3704  # class Derived
3705  # : public Base {
3706  end_class_head = class_info.starting_linenum
3707  for i in range(class_info.starting_linenum, linenum):
3708  if Search(r'\{\s*$', clean_lines.lines[i]):
3709  end_class_head = i
3710  break
3711  if end_class_head < linenum - 1:
3712  error(filename, linenum, 'whitespace/blank_line', 3,
3713  '"%s:" should be preceded by a blank line' % matched.group(1))
3714 
3715 
3716 def GetPreviousNonBlankLine(clean_lines, linenum):
3717  """Return the most recent non-blank line and its line number.
3718 
3719  Args:
3720  clean_lines: A CleansedLines instance containing the file contents.
3721  linenum: The number of the line to check.
3722 
3723  Returns:
3724  A tuple with two elements. The first element is the contents of the last
3725  non-blank line before the current line, or the empty string if this is the
3726  first non-blank line. The second is the line number of that line, or -1
3727  if this is the first non-blank line.
3728  """
3729 
3730  prevlinenum = linenum - 1
3731  while prevlinenum >= 0:
3732  prevline = clean_lines.elided[prevlinenum]
3733  if not IsBlankLine(prevline): # if not a blank line...
3734  return (prevline, prevlinenum)
3735  prevlinenum -= 1
3736  return ('', -1)
3737 
3738 
3739 def CheckBraces(filename, clean_lines, linenum, error):
3740  """Looks for misplaced braces (e.g. at the end of line).
3741 
3742  Args:
3743  filename: The name of the current file.
3744  clean_lines: A CleansedLines instance containing the file.
3745  linenum: The number of the line to check.
3746  error: The function to call with any errors found.
3747  """
3748 
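  # For example (illustrative), a "{" alone on a line following "if (cond)"
  # is flagged with '{ should almost always be at the end of the previous
  # line', and "do Step(); while (Busy());" is flagged with 'do/while
  # clauses should not be on a single line'.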
3749  line = clean_lines.elided[linenum] # get rid of comments and strings
3750 
3751  if Match(r'\s*{\s*$', line):
3752  # We allow an open brace to start a line in the case where someone is using
3753  # braces in a block to explicitly create a new scope, which is commonly used
3754  # to control the lifetime of stack-allocated variables. Braces are also
3755  # used for brace initializers inside function calls. We don't detect this
3756  # perfectly: we just don't complain if the last non-whitespace character on
3757  # the previous non-blank line is ',', ';', ':', '(', '{', or '}', or if the
3758  # previous line starts a preprocessor block. We also allow a brace on the
3759  # following line if it is part of an array initialization and would not fit
3760  # within the 80 character limit of the preceding line.
3761  prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
3762  if (not Search(r'[,;:}{(]\s*$', prevline) and
3763  not Match(r'\s*#', prevline) and
3764  not (GetLineWidth(prevline) > _line_length - 2 and '[]' in prevline)):
3765  error(filename, linenum, 'whitespace/braces', 4,
3766  '{ should almost always be at the end of the previous line')
3767 
3768  # An else clause should be on the same line as the preceding closing brace.
3769  if Match(r'\s*else\b\s*(?:if\b|\{|$)', line):
3770  prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
3771  if Match(r'\s*}\s*$', prevline):
3772  error(filename, linenum, 'whitespace/newline', 4,
3773  'An else should appear on the same line as the preceding }')
3774 
3775  # If braces come on one side of an else, they should be on both.
3776  # However, we have to worry about "else if" that spans multiple lines!
3777  if Search(r'else if\s*\(', line): # could be multi-line if
3778  brace_on_left = bool(Search(r'}\s*else if\s*\(', line))
3779  # find the ( after the if
3780  pos = line.find('else if')
3781  pos = line.find('(', pos)
3782  if pos > 0:
3783  (endline, _, endpos) = CloseExpression(clean_lines, linenum, pos)
3784  brace_on_right = endline[endpos:].find('{') != -1
3785  if brace_on_left != brace_on_right: # must be brace after if
3786  error(filename, linenum, 'readability/braces', 5,
3787  'If an else has a brace on one side, it should have it on both')
3788  elif Search(r'}\s*else[^{]*$', line) or Match(r'[^}]*else\s*{', line):
3789  error(filename, linenum, 'readability/braces', 5,
3790  'If an else has a brace on one side, it should have it on both')
3791 
3792  # Likewise, an else should never have its body on the same line
3793  if Search(r'\belse [^\s{]', line) and not Search(r'\belse if\b', line):
3794  error(filename, linenum, 'whitespace/newline', 4,
3795  'Else clause should never be on same line as else (use 2 lines)')
3796 
3797  # In the same way, a do/while should never be on one line
3798  if Match(r'\s*do [^\s{]', line):
3799  error(filename, linenum, 'whitespace/newline', 4,
3800  'do/while clauses should not be on a single line')
3801 
3802  # Check single-line if/else bodies. The style guide says 'curly braces are not
3803  # required for single-line statements'. We additionally allow multi-line,
3804  # single statements, but we reject anything with more than one semicolon in
3805  # it. This means that the first semicolon after the if should be at the end of
3806  # its line, and the line after that should have an indent level equal to or
3807  # lower than the if. We also check for ambiguous if/else nesting without
3808  # braces.
3809  if_else_match = Search(r'\b(if\s*\(|else\b)', line)
3810  if if_else_match and not Match(r'\s*#', line):
3811  if_indent = GetIndentLevel(line)
3812  endline, endlinenum, endpos = line, linenum, if_else_match.end()
3813  if_match = Search(r'\bif\s*\(', line)
3814  if if_match:
3815  # This could be a multiline if condition, so find the end first.
3816  pos = if_match.end() - 1
3817  (endline, endlinenum, endpos) = CloseExpression(clean_lines, linenum, pos)
3818  # Check for an opening brace, either directly after the if or on the next
3819  # line. If found, this isn't a single-statement conditional.
3820  if (not Match(r'\s*{', endline[endpos:])
3821  and not (Match(r'\s*$', endline[endpos:])
3822  and endlinenum < (len(clean_lines.elided) - 1)
3823  and Match(r'\s*{', clean_lines.elided[endlinenum + 1]))):
3824  while (endlinenum < len(clean_lines.elided)
3825  and ';' not in clean_lines.elided[endlinenum][endpos:]):
3826  endlinenum += 1
3827  endpos = 0
3828  if endlinenum < len(clean_lines.elided):
3829  endline = clean_lines.elided[endlinenum]
3830  # We allow a mix of whitespace and closing braces (e.g. for one-liner
3831  # methods) and a single \ after the semicolon (for macros)
3832  endpos = endline.find(';')
3833  if not Match(r';[\s}]*(\\?)$', endline[endpos:]):
3834  # Semicolon isn't the last character, there's something trailing.
3835  # Output a warning if the semicolon is not contained inside
3836  # a lambda expression.
3837  if not Match(r'^[^{};]*\[[^\[\]]*\][^{}]*\{[^{}]*\}\s*\)*[;,]\s*$',
3838  endline):
3839  error(filename, linenum, 'readability/braces', 4,
3840  'If/else bodies with multiple statements require braces')
3841  elif endlinenum < len(clean_lines.elided) - 1:
3842  # Make sure the next line is dedented
3843  next_line = clean_lines.elided[endlinenum + 1]
3844  next_indent = GetIndentLevel(next_line)
3845  # With ambiguous nested if statements, this will error out on the
3846  # if that *doesn't* match the else, regardless of whether it's the
3847  # inner one or outer one.
3848  if (if_match and Match(r'\s*else\b', next_line)
3849  and next_indent != if_indent):
3850  error(filename, linenum, 'readability/braces', 4,
3851  'Else clause should be indented at the same level as if. '
3852  'Ambiguous nested if/else chains require braces.')
3853  elif next_indent > if_indent:
3854  error(filename, linenum, 'readability/braces', 4,
3855  'If/else bodies with multiple statements require braces')
3856 
3857 
3858 def CheckTrailingSemicolon(filename, clean_lines, linenum, error):
3859  """Looks for redundant trailing semicolon.
3860 
3861  Args:
3862  filename: The name of the current file.
3863  clean_lines: A CleansedLines instance containing the file.
3864  linenum: The number of the line to check.
3865  error: The function to call with any errors found.
3866  """
3867 
3868  line = clean_lines.elided[linenum]
3869 
3870  # Block bodies should not be followed by a semicolon. Due to C++11
3871  # brace initialization, there are more places where semicolons are
3872  # required than not, so we use a whitelist approach to check these
3873  # rather than a blacklist. These are the places where "};" should
3874  # be replaced by just "}":
3875  # 1. Some flavor of block following closing parenthesis:
3876  # for (;;) {};
3877  # while (...) {};
3878  # switch (...) {};
3879  # Function(...) {};
3880  # if (...) {};
3881  # if (...) else if (...) {};
3882  #
3883  # 2. else block:
3884  # if (...) else {};
3885  #
3886  # 3. const member function:
3887  # Function(...) const {};
3888  #
3889  # 4. Block following some statement:
3890  # x = 42;
3891  # {};
3892  #
3893  # 5. Block at the beginning of a function:
3894  # Function(...) {
3895  # {};
3896  # }
3897  #
3898  # Note that naively checking for the preceding "{" will also match
3899  # braces inside multi-dimensional arrays, but this is fine since
3900  # that expression will not contain semicolons.
3901  #
3902  # 6. Block following another block:
3903  # while (true) {}
3904  # {};
3905  #
3906  # 7. End of namespaces:
3907  # namespace {};
3908  #
3909  # These semicolons seem far more common than other kinds of
3910  # redundant semicolons, possibly due to people converting classes
3911  # to namespaces. For now we do not warn for this case.
3912  #
3913  # Try matching case 1 first.
3914  match = Match(r'^(.*\)\s*)\{', line)
3915  if match:
3916  # Matched closing parenthesis (case 1). Check the token before the
3917  # matching opening parenthesis, and don't warn if it looks like a
3918  # macro. This avoids these false positives:
3919  # - macro that defines a base class
3920  # - multi-line macro that defines a base class
3921  # - macro that defines the whole class-head
3922  #
3923  # But we still issue warnings for macros that we know are safe to
3924  # warn about, specifically:
3925  # - TEST, TEST_F, TEST_P, MATCHER, MATCHER_P
3926  # - TYPED_TEST
3927  # - INTERFACE_DEF
3928  # - EXCLUSIVE_LOCKS_REQUIRED, SHARED_LOCKS_REQUIRED, LOCKS_EXCLUDED:
3929  #
3930  # We implement a whitelist of safe macros instead of a blacklist of
3931  # unsafe macros, even though the latter appears less frequently in
3932  # google code and would have been easier to implement. This is because
3933  # the downside of getting the whitelist wrong is only some extra
3934  # semicolons, while getting the blacklist wrong would result in
3935  # compile errors.
3936  #
3937  # In addition to macros, we also don't want to warn on
3938  # - Compound literals
3939  # - Lambdas
3940  # - alignas specifier with anonymous structs
3941  # - decltype
3942  closing_brace_pos = match.group(1).rfind(')')
3943  opening_parenthesis = ReverseCloseExpression(
3944  clean_lines, linenum, closing_brace_pos)
3945  if opening_parenthesis[2] > -1:
3946  line_prefix = opening_parenthesis[0][0:opening_parenthesis[2]]
3947  macro = Search(r'\b([A-Z_][A-Z0-9_]*)\s*$', line_prefix)
3948  func = Match(r'^(.*\])\s*$', line_prefix)
3949  if ((macro and
3950  macro.group(1) not in (
3951  'TEST', 'TEST_F', 'MATCHER', 'MATCHER_P', 'TYPED_TEST',
3952  'EXCLUSIVE_LOCKS_REQUIRED', 'SHARED_LOCKS_REQUIRED',
3953  'LOCKS_EXCLUDED', 'INTERFACE_DEF')) or
3954  (func and not Search(r'\boperator\s*\[\s*\]', func.group(1))) or
3955  Search(r'\b(?:struct|union)\s+alignas\s*$', line_prefix) or
3956  Search(r'\bdecltype$', line_prefix) or
3957  Search(r'\s+=\s*$', line_prefix)):
3958  match = None
3959  if (match and
3960  opening_parenthesis[1] > 1 and
3961  Search(r'\]\s*$', clean_lines.elided[opening_parenthesis[1] - 1])):
3962  # Multi-line lambda-expression
3963  match = None
3964 
3965  else:
3966  # Try matching cases 2-3.
3967  match = Match(r'^(.*(?:else|\)\s*const)\s*)\{', line)
3968  if not match:
3969  # Try matching cases 4-6. These are always matched on separate lines.
3970  #
3971  # Note that we can't simply concatenate the previous line to the
3972  # current line and do a single match, otherwise we may output
3973  # duplicate warnings for the blank line case:
3974  # if (cond) {
3975  # // blank line
3976  # }
3977  prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0]
3978  if prevline and Search(r'[;{}]\s*$', prevline):
3979  match = Match(r'^(\s*)\{', line)
3980 
3981  # Check matching closing brace
3982  if match:
3983  (endline, endlinenum, endpos) = CloseExpression(
3984  clean_lines, linenum, len(match.group(1)))
3985  if endpos > -1 and Match(r'^\s*;', endline[endpos:]):
3986  # Current {} pair is eligible for semicolon check, and we have found
3987  # the redundant semicolon, output warning here.
3988  #
3989  # Note: because we are scanning forward for opening braces, and
3990  # outputting warnings for the matching closing brace, if there are
3991  # nested blocks with trailing semicolons, we will get the error
3992  # messages in reversed order.
3993 
3994  # We need to check the line forward for NOLINT
3995  raw_lines = clean_lines.raw_lines
3996  ParseNolintSuppressions(filename, raw_lines[endlinenum-1], endlinenum-1,
3997  error)
3998  ParseNolintSuppressions(filename, raw_lines[endlinenum], endlinenum,
3999  error)
4000 
4001  error(filename, endlinenum, 'readability/braces', 4,
4002  "You don't need a ; after a }")
4003 
4004 
4005 def CheckEmptyBlockBody(filename, clean_lines, linenum, error):
4006  """Look for empty loop/conditional body with only a single semicolon.
4007 
4008  Args:
4009  filename: The name of the current file.
4010  clean_lines: A CleansedLines instance containing the file.
4011  linenum: The number of the line to check.
4012  error: The function to call with any errors found.
4013  """
4014 
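  # For example (illustrative):
  #   while (Poll());   <- 'Empty loop bodies should use {} or continue'
  #   if (ready);       <- 'Empty conditional bodies should use {}'
  #   if (ready) {}     <- 'If statement had no body and no else clause'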
4015  # Search for loop keywords at the beginning of the line. Because only
4016  # whitespaces are allowed before the keywords, this will also ignore most
4017  # do-while-loops, since those lines should start with a closing brace.
4018  #
4019  # We also check "if" blocks here, since an empty conditional block
4020  # is likely an error.
4021  line = clean_lines.elided[linenum]
4022  matched = Match(r'\s*(for|while|if)\s*\(', line)
4023  if matched:
4024  # Find the end of the conditional expression.
4025  (end_line, end_linenum, end_pos) = CloseExpression(
4026  clean_lines, linenum, line.find('('))
4027 
4028  # Output warning if what follows the condition expression is a semicolon.
4029  # No warning for all other cases, including whitespace or newline, since we
4030  # have a separate check for semicolons preceded by whitespace.
4031  if end_pos >= 0 and Match(r';', end_line[end_pos:]):
4032  if matched.group(1) == 'if':
4033  error(filename, end_linenum, 'whitespace/empty_conditional_body', 5,
4034  'Empty conditional bodies should use {}')
4035  else:
4036  error(filename, end_linenum, 'whitespace/empty_loop_body', 5,
4037  'Empty loop bodies should use {} or continue')
4038 
4039  # Check for if statements that have completely empty bodies (no comments)
4040  # and no else clauses.
4041  if end_pos >= 0 and matched.group(1) == 'if':
4042  # Find the position of the opening { for the if statement.
4043  # Return without logging an error if it has no brackets.
4044  opening_linenum = end_linenum
4045  opening_line_fragment = end_line[end_pos:]
4046  # Loop until EOF or find anything that's not whitespace or opening {.
4047  while not Search(r'^\s*\{', opening_line_fragment):
4048  if Search(r'^(?!\s*$)', opening_line_fragment):
4049  # Conditional has no brackets.
4050  return
4051  opening_linenum += 1
4052  if opening_linenum == len(clean_lines.elided):
4053  # Couldn't find conditional's opening { or any code before EOF.
4054  return
4055  opening_line_fragment = clean_lines.elided[opening_linenum]
4056  # Set opening_line (opening_line_fragment may not be entire opening line).
4057  opening_line = clean_lines.elided[opening_linenum]
4058 
4059  # Find the position of the closing }.
4060  opening_pos = opening_line_fragment.find('{')
4061  if opening_linenum == end_linenum:
4062  # We need to make opening_pos relative to the start of the entire line.
4063  opening_pos += end_pos
4064  (closing_line, closing_linenum, closing_pos) = CloseExpression(
4065  clean_lines, opening_linenum, opening_pos)
4066  if closing_pos < 0:
4067  return
4068 
4069  # Now construct the body of the conditional. This consists of the portion
4070  # of the opening line after the {, all lines until the closing line,
4071  # and the portion of the closing line before the }.
4072  if (clean_lines.raw_lines[opening_linenum] !=
4073  CleanseComments(clean_lines.raw_lines[opening_linenum])):
4074  # Opening line ends with a comment, so conditional isn't empty.
4075  return
4076  if closing_linenum > opening_linenum:
4077  # Opening line after the {. Ignore comments here since we checked above.
4078  body = list(opening_line[opening_pos+1:])
4079  # All lines until closing line, excluding closing line, with comments.
4080  body.extend(clean_lines.raw_lines[opening_linenum+1:closing_linenum])
4081  # Closing line before the }. Won't (and can't) have comments.
4082  body.append(clean_lines.elided[closing_linenum][:closing_pos-1])
4083  body = '\n'.join(body)
4084  else:
4085  # If statement has brackets and fits on a single line.
4086  body = opening_line[opening_pos+1:closing_pos-1]
4087 
4088  # Check if the body is empty
4089  if not _EMPTY_CONDITIONAL_BODY_PATTERN.search(body):
4090  return
4091  # The body is empty. Now make sure there's not an else clause.
4092  current_linenum = closing_linenum
4093  current_line_fragment = closing_line[closing_pos:]
4094  # Loop until EOF or find anything that's not whitespace or else clause.
4095  while Search(r'^\s*$|^(?=\s*else)', current_line_fragment):
4096  if Search(r'^(?=\s*else)', current_line_fragment):
4097  # Found an else clause, so don't log an error.
4098  return
4099  current_linenum += 1
4100  if current_linenum == len(clean_lines.elided):
4101  break
4102  current_line_fragment = clean_lines.elided[current_linenum]
4103 
4104  # The body is empty and there's no else clause until EOF or other code.
4105  error(filename, end_linenum, 'whitespace/empty_if_body', 4,
4106  ('If statement had no body and no else clause'))
4107 
4108 
4109 def FindCheckMacro(line):
4110  """Find a replaceable CHECK-like macro.
4111 
4112  Args:
4113  line: line to search on.
4114  Returns:
4115  (macro name, start position), or (None, -1) if no replaceable
4116  macro is found.
4117  """
4118  for macro in _CHECK_MACROS:
4119  i = line.find(macro)
4120  if i >= 0:
4121  # Find opening parenthesis. Do a regular expression match here
4122  # to make sure that we are matching the expected CHECK macro, as
4123  # opposed to some other macro that happens to contain the CHECK
4124  # substring.
4125  matched = Match(r'^(.*\b' + macro + r'\s*)\(', line)
4126  if not matched:
4127  continue
4128  return (macro, len(matched.group(1)))
4129  return (None, -1)
4130 
4131 
4132 def CheckCheck(filename, clean_lines, linenum, error):
4133  """Checks the use of CHECK and EXPECT macros.
4134 
4135  Args:
4136  filename: The name of the current file.
4137  clean_lines: A CleansedLines instance containing the file.
4138  linenum: The number of the line to check.
4139  error: The function to call with any errors found.
4140  """
4141 
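  # For example (illustrative): for "CHECK(x == 42);" the loop below splits
  # the expression into lhs "x", operator "==" and rhs "42"; because 42 is a
  # constant literal, the suggestion 'Consider using CHECK_EQ instead of
  # CHECK(a == b)' is emitted.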
4142  # Decide the set of replacement macros that should be suggested
4143  lines = clean_lines.elided
4144  (check_macro, start_pos) = FindCheckMacro(lines[linenum])
4145  if not check_macro:
4146  return
4147 
4148  # Find end of the boolean expression by matching parentheses
4149  (last_line, end_line, end_pos) = CloseExpression(
4150  clean_lines, linenum, start_pos)
4151  if end_pos < 0:
4152  return
4153 
4154  # If the check macro is followed by something other than a
4155  # semicolon, assume users will log their own custom error messages
4156  # and don't suggest any replacements.
4157  if not Match(r'\s*;', last_line[end_pos:]):
4158  return
4159 
4160  if linenum == end_line:
4161  expression = lines[linenum][start_pos + 1:end_pos - 1]
4162  else:
4163  expression = lines[linenum][start_pos + 1:]
4164  for i in xrange(linenum + 1, end_line):
4165  expression += lines[i]
4166  expression += last_line[0:end_pos - 1]
4167 
4168  # Parse expression so that we can take parentheses into account.
4169  # This avoids false positives for inputs like "CHECK((a < 4) == b)",
4170  # which is not replaceable by CHECK_LE.
4171  lhs = ''
4172  rhs = ''
4173  operator = None
4174  while expression:
4175  matched = Match(r'^\s*(<<|<<=|>>|>>=|->\*|->|&&|\|\||'
4176  r'==|!=|>=|>|<=|<|\()(.*)$', expression)
4177  if matched:
4178  token = matched.group(1)
4179  if token == '(':
4180  # Parenthesized operand
4181  expression = matched.group(2)
4182  (end, _) = FindEndOfExpressionInLine(expression, 0, ['('])
4183  if end < 0:
4184  return # Unmatched parenthesis
4185  lhs += '(' + expression[0:end]
4186  expression = expression[end:]
4187  elif token in ('&&', '||'):
4188  # Logical and/or operators. This means the expression
4189  # contains more than one term, for example:
4190  # CHECK(42 < a && a < b);
4191  #
4192  # These are not replaceable with CHECK_LE, so bail out early.
4193  return
4194  elif token in ('<<', '<<=', '>>', '>>=', '->*', '->'):
4195  # Non-relational operator
4196  lhs += token
4197  expression = matched.group(2)
4198  else:
4199  # Relational operator
4200  operator = token
4201  rhs = matched.group(2)
4202  break
4203  else:
4204  # Unparenthesized operand. Instead of appending to lhs one character
4205  # at a time, we do another regular expression match to consume several
4206  # characters at once if possible. Trivial benchmark shows that this
4207  # is more efficient when the operands are longer than a single
4208  # character, which is generally the case.
4209  matched = Match(r'^([^-=!<>()&|]+)(.*)$', expression)
4210  if not matched:
4211  matched = Match(r'^(\s*\S)(.*)$', expression)
4212  if not matched:
4213  break
4214  lhs += matched.group(1)
4215  expression = matched.group(2)
4216 
4217  # Only apply checks if we got all parts of the boolean expression
4218  if not (lhs and operator and rhs):
4219  return
4220 
4221  # Check that rhs does not contain logical operators. We already know
4222  # that lhs is fine since the loop above parses out && and ||.
4223  if rhs.find('&&') > -1 or rhs.find('||') > -1:
4224  return
4225 
4226  # At least one of the operands must be a constant literal. This is
4227  # to avoid suggesting replacements for unprintable things like
4228  # CHECK(variable != iterator)
4229  #
4230  # The following pattern matches decimal, hex integers, strings, and
4231  # characters (in that order).
4232  lhs = lhs.strip()
4233  rhs = rhs.strip()
4234  match_constant = r'^([-+]?(\d+|0[xX][0-9a-fA-F]+)[lLuU]{0,3}|".*"|\'.*\')$'
4235  if Match(match_constant, lhs) or Match(match_constant, rhs):
4236  # Note: since we know both lhs and rhs, we can provide a more
4237  # descriptive error message like:
4238  # Consider using CHECK_EQ(x, 42) instead of CHECK(x == 42)
4239  # Instead of:
4240  # Consider using CHECK_EQ instead of CHECK(a == b)
4241  #
4242  # We are still keeping the less descriptive message because if lhs
4243  # or rhs gets long, the error message might become unreadable.
4244  error(filename, linenum, 'readability/check', 2,
4245  'Consider using %s instead of %s(a %s b)' % (
4246  _CHECK_REPLACEMENT[check_macro][operator],
4247  check_macro, operator))
4248 
4249 
4250 def CheckAltTokens(filename, clean_lines, linenum, error):
4251  """Check alternative keywords being used in boolean expressions.
4252 
4253  Args:
4254  filename: The name of the current file.
4255  clean_lines: A CleansedLines instance containing the file.
4256  linenum: The number of the line to check.
4257  error: The function to call with any errors found.
4258  """
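  # For example (illustrative): "if (a and not b)" is flagged twice, with
  # 'Use operator && instead of and' and 'Use operator ! instead of not'.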
4259  line = clean_lines.elided[linenum]
4260 
4261  # Avoid preprocessor lines
4262  if Match(r'^\s*#', line):
4263  return
4264 
4265  # Last ditch effort to avoid multi-line comments. This will not help
4266  # if the comment started before the current line or ended after the
4267  # current line, but it catches most of the false positives. At least,
4268  # it provides a way to work around this warning for people who use
4269  # multi-line comments in preprocessor macros.
4270  #
4271  # TODO(unknown): remove this once cpplint has better support for
4272  # multi-line comments.
4273  if line.find('/*') >= 0 or line.find('*/') >= 0:
4274  return
4275 
4276  for match in _ALT_TOKEN_REPLACEMENT_PATTERN.finditer(line):
4277  error(filename, linenum, 'readability/alt_tokens', 2,
4278  'Use operator %s instead of %s' % (
4279  _ALT_TOKEN_REPLACEMENT[match.group(1)], match.group(1)))
4280 
4281 
4282 def GetLineWidth(line):
4283  """Determines the width of the line in column positions.
4284 
4285  Args:
4286  line: A string, which may be a Unicode string.
4287 
4288  Returns:
4289  The width of the line in column positions, accounting for Unicode
4290  combining characters and wide characters.
4291  """
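  # For example (illustrative): GetLineWidth('foo') is 3, while a fullwidth
  # character such as u'\u4f60' contributes 2 columns and a combining accent
  # contributes 0.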
4292  if isinstance(line, unicode):
4293  width = 0
4294  for uc in unicodedata.normalize('NFC', line):
4295  if unicodedata.east_asian_width(uc) in ('W', 'F'):
4296  width += 2
4297  elif not unicodedata.combining(uc):
4298  # Issue 337
4299  # https://mail.python.org/pipermail/python-list/2012-August/628809.html
4300  if (sys.version_info.major, sys.version_info.minor) <= (3, 2):
4301  # https://github.com/python/cpython/blob/2.7/Include/unicodeobject.h#L81
4302  is_wide_build = sysconfig.get_config_var("Py_UNICODE_SIZE") >= 4
4303  # https://github.com/python/cpython/blob/2.7/Objects/unicodeobject.c#L564
4304  is_low_surrogate = 0xDC00 <= ord(uc) <= 0xDFFF
4305  if not is_wide_build and is_low_surrogate:
4306  width -= 1
4307 
4308  width += 1
4309  return width
4310  else:
4311  return len(line)
4312 
4313 
4314 def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state,
4315  error):
4316  """Checks rules from the 'C++ style rules' section of cppguide.html.
4317 
4318  Most of these rules are hard to test (naming, comment style), but we
4319  do what we can. In particular we check for 2-space indents, line lengths,
4320  tab usage, spaces inside code, etc.
4321 
4322  Args:
4323  filename: The name of the current file.
4324  clean_lines: A CleansedLines instance containing the file.
4325  linenum: The number of the line to check.
4326  file_extension: The extension (without the dot) of the filename.
4327  nesting_state: A NestingState instance which maintains information about
4328  the current stack of nested blocks being parsed.
4329  error: The function to call with any errors found.
4330  """
4331 
4332  # Don't use "elided" lines here, otherwise we can't check commented lines.
4333  # Don't want to use "raw" either, because we don't want to check inside C++11
4334  # raw strings.
4335  raw_lines = clean_lines.lines_without_raw_strings
4336  line = raw_lines[linenum]
4337  prev = raw_lines[linenum - 1] if linenum > 0 else ''
4338 
4339  if line.find('\t') != -1:
4340  error(filename, linenum, 'whitespace/tab', 1,
4341  'Tab found; better to use spaces')
4342 
4343  # One or three blank spaces at the beginning of the line is weird; it's
4344  # hard to reconcile that with 2-space indents.
4345  # NOTE: here are the conditions Rob Pike used for his tests. Mine aren't
4346  # as sophisticated, but it may be worth becoming so: RLENGTH==initial_spaces
4347  # if(RLENGTH > 20) complain = 0;
4348  # if(match($0, " +(error|private|public|protected):")) complain = 0;
4349  # if(match(prev, "&& *$")) complain = 0;
4350  # if(match(prev, "\\|\\| *$")) complain = 0;
4351  # if(match(prev, "[\",=><] *$")) complain = 0;
4352  # if(match($0, " <<")) complain = 0;
4353  # if(match(prev, " +for \\(")) complain = 0;
4354  # if(prevodd && match(prevprev, " +for \\(")) complain = 0;
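  # For example (illustrative), a line indented with a single space, such as
  #   " int x = 0;"
  # is flagged with 'Weird number of spaces at line-start.' unless it looks
  # like a label or a continuation of the previous line.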
4355  scope_or_label_pattern = r'\s*\w+\s*:\s*\\?$'
4356  classinfo = nesting_state.InnermostClass()
4357  initial_spaces = 0
4358  cleansed_line = clean_lines.elided[linenum]
4359  while initial_spaces < len(line) and line[initial_spaces] == ' ':
4360  initial_spaces += 1
4361  # There are certain situations in which we allow one space, notably for
4362  # section labels, and also lines containing multi-line raw strings.
4363  # We also don't check for lines that look like continuation lines
4364  # (of lines ending in double quotes, commas, equals, or angle brackets)
4365  # because the rules for how to indent those are non-trivial.
4366  if (not Search(r'[",=><] *$', prev) and
4367  (initial_spaces == 1 or initial_spaces == 3) and
4368  not Match(scope_or_label_pattern, cleansed_line) and
4369  not (clean_lines.raw_lines[linenum] != line and
4370  Match(r'^\s*""', line))):
4371  error(filename, linenum, 'whitespace/indent', 3,
4372  'Weird number of spaces at line-start. '
4373  'Are you using a 2-space indent?')
4374 
4375  if line and line[-1].isspace():
4376  error(filename, linenum, 'whitespace/end_of_line', 4,
4377  'Line ends in whitespace. Consider deleting these extra spaces.')
4378 
4379  # Check if the line is a header guard.
4380  is_header_guard = False
4381  if IsHeaderExtension(file_extension):
4382  cppvar = GetHeaderGuardCPPVariable(filename)
4383  if (line.startswith('#ifndef %s' % cppvar) or
4384  line.startswith('#define %s' % cppvar) or
4385  line.startswith('#endif // %s' % cppvar)):
4386  is_header_guard = True
4387  # #include lines and header guards can be long, since there's no clean way to
4388  # split them.
4389  #
4390  # URLs can be long too. It's possible to split these, but it makes them
4391  # harder to cut&paste.
4392  #
4393  # The "$Id:...$" comment may also get very long without it being the
4394  # developer's fault.
4395  if (not line.startswith('#include') and not is_header_guard and
4396  not Match(r'^\s*//.*http(s?)://\S*$', line) and
4397  not Match(r'^\s*//\s*[^\s]*$', line) and
4398  not Match(r'^// \$Id:.*#[0-9]+ \$$', line)):
4399  line_width = GetLineWidth(line)
4400  if line_width > _line_length:
4401  error(filename, linenum, 'whitespace/line_length', 2,
4402  'Lines should be <= %i characters long' % _line_length)
4403 
4404  if (cleansed_line.count(';') > 1 and
4405  # for loops are allowed two ;'s (and may run over two lines).
4406  cleansed_line.find('for') == -1 and
4407  (GetPreviousNonBlankLine(clean_lines, linenum)[0].find('for') == -1 or
4408  GetPreviousNonBlankLine(clean_lines, linenum)[0].find(';') != -1) and
4409  # It's ok to have many commands in a switch case that fits in 1 line
4410  not ((cleansed_line.find('case ') != -1 or
4411  cleansed_line.find('default:') != -1) and
4412  cleansed_line.find('break;') != -1)):
4413  error(filename, linenum, 'whitespace/newline', 0,
4414  'More than one command on the same line')
4415 
4416  # Some more style checks
4417  CheckBraces(filename, clean_lines, linenum, error)
4418  CheckTrailingSemicolon(filename, clean_lines, linenum, error)
4419  CheckEmptyBlockBody(filename, clean_lines, linenum, error)
4420  CheckSpacing(filename, clean_lines, linenum, nesting_state, error)
4421  CheckOperatorSpacing(filename, clean_lines, linenum, error)
4422  CheckParenthesisSpacing(filename, clean_lines, linenum, error)
4423  CheckCommaSpacing(filename, clean_lines, linenum, error)
4424  CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error)
4425  CheckSpacingForFunctionCall(filename, clean_lines, linenum, error)
4426  CheckCheck(filename, clean_lines, linenum, error)
4427  CheckAltTokens(filename, clean_lines, linenum, error)
4428  classinfo = nesting_state.InnermostClass()
4429  if classinfo:
4430  CheckSectionSpacing(filename, clean_lines, classinfo, linenum, error)
4431 
4432 
4433 _RE_PATTERN_INCLUDE = re.compile(r'^\s*#\s*include\s*([<"])([^>"]*)[>"].*$')
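# Editorial note (not part of the original tool): illustrative captures for
# the pattern above, assuming typical include lines:
#   '#include <vector>'      -> group(1) == '<', group(2) == 'vector'
#   '#include "foo/bar.h"'   -> group(1) == '"', group(2) == 'foo/bar.h'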
4434 # Matches the first component of a filename delimited by -s and _s. That is:
4435 # _RE_FIRST_COMPONENT.match('foo').group(0) == 'foo'
4436 # _RE_FIRST_COMPONENT.match('foo.cc').group(0) == 'foo'
4437 # _RE_FIRST_COMPONENT.match('foo-bar_baz.cc').group(0) == 'foo'
4438 # _RE_FIRST_COMPONENT.match('foo_bar-baz.cc').group(0) == 'foo'
4439 _RE_FIRST_COMPONENT = re.compile(r'^[^-_.]+')
4440 
4441 
4442 def _DropCommonSuffixes(filename):
4443  """Drops common suffixes like _test.cc or -inl.h from filename.
4444 
4445  For example:
4446  >>> _DropCommonSuffixes('foo/foo-inl.h')
4447  'foo/foo'
4448  >>> _DropCommonSuffixes('foo/bar/foo.cc')
4449  'foo/bar/foo'
4450  >>> _DropCommonSuffixes('foo/foo_internal.h')
4451  'foo/foo'
4452  >>> _DropCommonSuffixes('foo/foo_unusualinternal.h')
4453  'foo/foo_unusualinternal'
4454 
4455  Args:
4456  filename: The input filename.
4457 
4458  Returns:
4459  The filename with the common suffix removed.
4460  """
4461  for suffix in ('test.cc', 'regtest.cc', 'unittest.cc',
4462  'inl.h', 'impl.h', 'internal.h'):
4463  if (filename.endswith(suffix) and len(filename) > len(suffix) and
4464  filename[-len(suffix) - 1] in ('-', '_')):
4465  return filename[:-len(suffix) - 1]
4466  return os.path.splitext(filename)[0]
4467 
4468 
4469 def _ClassifyInclude(fileinfo, include, is_system):
4470  """Figures out what kind of header 'include' is.
4471 
4472  Args:
4473  fileinfo: The current file cpplint is running over. A FileInfo instance.
4474  include: The path to a #included file.
4475  is_system: True if the #include used <> rather than "".
4476 
4477  Returns:
4478  One of the _XXX_HEADER constants.
4479 
4480  For example:
4481  >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True)
4482  _C_SYS_HEADER
4483  >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True)
4484  _CPP_SYS_HEADER
4485  >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False)
4486  _LIKELY_MY_HEADER
4487  >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'),
4488  ... 'bar/foo_other_ext.h', False)
4489  _POSSIBLE_MY_HEADER
4490  >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False)
4491  _OTHER_HEADER
4492  """
4493  # _CPP_HEADERS is the set of standard C++ header names; a system include
4494  # that is not in that set is classified as a C system header.
4495  is_cpp_h = include in _CPP_HEADERS
4496 
4497  if is_system:
4498  if is_cpp_h:
4499  return _CPP_SYS_HEADER
4500  else:
4501  return _C_SYS_HEADER
4502 
4503  # If the target file and the include we're checking share a
4504  # basename when we drop common suffixes, and the include lives in the
4505  # same directory (or in ../public), it's likely owned by the target file.
4506  target_dir, target_base = (
4507  os.path.split(_DropCommonSuffixes(fileinfo.RepositoryName())))
4508  include_dir, include_base = os.path.split(_DropCommonSuffixes(include))
4509  if target_base == include_base and (
4510  include_dir == target_dir or
4511  include_dir == os.path.normpath(target_dir + '/../public')):
4512  return _LIKELY_MY_HEADER
4513 
4514  # If the target and include share some initial basename
4515  # component, it's possible the target is implementing the
4516  # include, so it's allowed to be first, but we'll never
4517  # complain if it's not there.
4518  target_first_component = _RE_FIRST_COMPONENT.match(target_base)
4519  include_first_component = _RE_FIRST_COMPONENT.match(include_base)
4520  if (target_first_component and include_first_component and
4521  target_first_component.group(0) ==
4522  include_first_component.group(0)):
4523  return _POSSIBLE_MY_HEADER
4524 
4525  return _OTHER_HEADER
4526 
4527 
4528 
4529 def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
4530  """Check rules that are applicable to #include lines.
4531 
4532  Strings on #include lines are NOT removed from elided line, to make
4533  certain tasks easier. However, to prevent false positives, checks
4534  applicable to #include lines in CheckLanguage must be put here.
4535 
4536  Args:
4537  filename: The name of the current file.
4538  clean_lines: A CleansedLines instance containing the file.
4539  linenum: The number of the line to check.
4540  include_state: An _IncludeState instance in which the headers are inserted.
4541  error: The function to call with any errors found.
4542  """
4543  fileinfo = FileInfo(filename)
4544  line = clean_lines.lines[linenum]
4545 
4546  # "include" should use the new style "foo/bar.h" instead of just "bar.h"
4547  # Only do this check if the included header follows google naming
4548  # conventions. If not, assume that it's a 3rd party API that
4549  # requires special include conventions.
4550  #
4551  # We also make an exception for Lua headers, which follow google
4552  # naming convention but not the include convention.
4553  match = Match(r'#include\s*"([^/]+\.h)"', line)
4554  if match and not _THIRD_PARTY_HEADERS_PATTERN.match(match.group(1)):
4555  error(filename, linenum, 'build/include', 4,
4556  'Include the directory when naming .h files')
4557 
4558  # We shouldn't include a file more than once. Actually, there are a
4559  # handful of instances where doing so is okay, but in general it's
4560  # not.
4561  match = _RE_PATTERN_INCLUDE.search(line)
4562  if match:
4563  include = match.group(2)
4564  is_system = (match.group(1) == '<')
4565  duplicate_line = include_state.FindHeader(include)
4566  if duplicate_line >= 0:
4567  error(filename, linenum, 'build/include', 4,
4568  '"%s" already included at %s:%s' %
4569  (include, filename, duplicate_line))
4570  elif (include.endswith('.cc') and
4571  os.path.dirname(fileinfo.RepositoryName()) != os.path.dirname(include)):
4572  error(filename, linenum, 'build/include', 4,
4573  'Do not include .cc files from other packages')
4574  elif not _THIRD_PARTY_HEADERS_PATTERN.match(include):
4575  include_state.include_list[-1].append((include, linenum))
4576 
4577  # We want to ensure that headers appear in the right order:
4578  # 1) for foo.cc, foo.h (preferred location)
4579  # 2) c system files
4580  # 3) cpp system files
4581  # 4) for foo.cc, foo.h (deprecated location)
4582  # 5) other google headers
4583  #
4584  # We classify each include statement as one of those 5 types
4585  # using a number of techniques. The include_state object keeps
4586  # track of the highest type seen, and complains if we see a
4587  # lower type after that.
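    # Illustrative sketch (editorial, with made-up paths) of the ordering the
    # check below enforces for a file foo.cc:
    #   #include "mydir/foo.h"     // 1) foo's own header, preferred location
    #   #include <sys/types.h>     // 2) C system header
    #   #include <vector>          // 3) C++ system header
    #   #include "base/logging.h"  // 5) other google header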
4588  error_message = include_state.CheckNextIncludeOrder(
4589  _ClassifyInclude(fileinfo, include, is_system))
4590  if error_message:
4591  error(filename, linenum, 'build/include_order', 4,
4592  '%s. Should be: %s.h, c system, c++ system, other.' %
4593  (error_message, fileinfo.BaseName()))
4594  canonical_include = include_state.CanonicalizeAlphabeticalOrder(include)
4595  if not include_state.IsInAlphabeticalOrder(
4596  clean_lines, linenum, canonical_include):
4597  error(filename, linenum, 'build/include_alpha', 4,
4598  'Include "%s" not in alphabetical order' % include)
4599  include_state.SetLastHeader(canonical_include)
4600 
4601 
4602 
4603 def _GetTextInside(text, start_pattern):
4604  r"""Retrieves all the text between matching open and close parentheses.
4605 
4606  Given a string of lines and a regular expression string, retrieve all the text
4607  following the expression and between opening punctuation symbols like
4608  (, [, or {, and the matching close-punctuation symbol. This handles properly
4609  nested occurrences of the punctuation symbols, so for text like
4610  printf(a(), b(c()));
4611  a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
4612  start_pattern must match a string that ends with an opening punctuation symbol.
4613 
4614  Args:
4615  text: The lines to extract text from. Its comments and strings must be elided.
4616  It can be a single line or span multiple lines.
4617  start_pattern: The regexp string indicating where to start extracting
4618  the text.
4619  Returns:
4620  The extracted text.
4621  None if either the opening string or ending punctuation could not be found.
4622  """
4623  # TODO(unknown): Audit cpplint.py to see what places could be profitably
4624  # rewritten to use _GetTextInside (and that use inferior regexp matching today).
4625 
4626  # Maps each opening punctuation symbol to its matching closing symbol.
4627  matching_punctuation = {'(': ')', '{': '}', '[': ']'}
4628  closing_punctuation = set(matching_punctuation.values())
4629 
4630  # Find the position to start extracting text.
4631  match = re.search(start_pattern, text, re.M)
4632  if not match: # start_pattern not found in text.
4633  return None
4634  start_position = match.end(0)
4635 
4636  assert start_position > 0, (
4637  'start_pattern must end with an opening punctuation.')
4638  assert text[start_position - 1] in matching_punctuation, (
4639  'start_pattern must end with an opening punctuation.')
4640  # Stack of closing punctuations we expect to have in text after position.
4641  punctuation_stack = [matching_punctuation[text[start_position - 1]]]
4642  position = start_position
4643  while punctuation_stack and position < len(text):
4644  if text[position] == punctuation_stack[-1]:
4645  punctuation_stack.pop()
4646  elif text[position] in closing_punctuation:
4647  # A closing punctuation without matching opening punctuations.
4648  return None
4649  elif text[position] in matching_punctuation:
4650  punctuation_stack.append(matching_punctuation[text[position]])
4651  position += 1
4652  if punctuation_stack:
4653  # Opening punctuations left without matching close-punctuations.
4654  return None
4655  # All punctuation matched; return the text between the delimiters.
4656  return text[start_position:position - 1]
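# Editorial usage sketch for _GetTextInside (mirrors the docstring above):
#   _GetTextInside('printf(a(), b(c()));', r'printf\(')  ->  'a(), b(c())'
#   _GetTextInside('foo[bar(1, 2)] = 3;', r'foo\[')      ->  'bar(1, 2)'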
4657 
4658 
4659 # Patterns for matching call-by-reference parameters.
4660 #
4661 # Supports nested templates up to 2 levels deep using this messy pattern:
4662 # < (?: < (?: < [^<>]*
4663 # >
4664 # | [^<>] )*
4665 # >
4666 # | [^<>] )*
4667 # >
4668 _RE_PATTERN_IDENT = r'[_a-zA-Z]\w*' # =~ [[:alpha:]][[:alnum:]]*
4669 _RE_PATTERN_TYPE = (
4670  r'(?:const\s+)?(?:typename\s+|class\s+|struct\s+|union\s+|enum\s+)?'
4671  r'(?:\w|'
4672  r'\s*<(?:<(?:<[^<>]*>|[^<>])*>|[^<>])*>|'
4673  r'::)+')
4674 # A call-by-reference parameter ends with '& identifier'.
4675 _RE_PATTERN_REF_PARAM = re.compile(
4676  r'(' + _RE_PATTERN_TYPE + r'(?:\s*(?:\bconst\b|[*]))*\s*'
4677  r'&\s*' + _RE_PATTERN_IDENT + r')\s*(?:=[^,()]+)?[,)]')
4678 # A call-by-const-reference parameter either ends with 'const& identifier'
4679 # or looks like 'const type& identifier' when 'type' is atomic.
4680 _RE_PATTERN_CONST_REF_PARAM = (
4681  r'(?:.*\s*\bconst\s*&\s*' + _RE_PATTERN_IDENT +
4682  r'|const\s+' + _RE_PATTERN_TYPE + r'\s*&\s*' + _RE_PATTERN_IDENT + r')')
4683 # Stream types.
4684 _RE_PATTERN_REF_STREAM_PARAM = (
4685  r'(?:.*stream\s*&\s*' + _RE_PATTERN_IDENT + r')')
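# Editorial sketch (assumed declarations) of how these patterns are applied in
# CheckForNonConstReference below:
#   'void Update(Foo &foo);'         -> 'Foo &foo' matches only
#                                       _RE_PATTERN_REF_PARAM: reported.
#   'void Update(const Foo &foo);'   -> also matches _RE_PATTERN_CONST_REF_PARAM:
#                                       not reported.
#   'void Print(std::ostream &out);' -> matches _RE_PATTERN_REF_STREAM_PARAM:
#                                       not reported.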
4686 
4687 
4688 def CheckLanguage(filename, clean_lines, linenum, file_extension,
4689  include_state, nesting_state, error):
4690  """Checks rules from the 'C++ language rules' section of cppguide.html.
4691 
4692  Some of these rules are hard to test (function overloading, using
4693  uint32 inappropriately), but we do the best we can.
4694 
4695  Args:
4696  filename: The name of the current file.
4697  clean_lines: A CleansedLines instance containing the file.
4698  linenum: The number of the line to check.
4699  file_extension: The extension (without the dot) of the filename.
4700  include_state: An _IncludeState instance in which the headers are inserted.
4701  nesting_state: A NestingState instance which maintains information about
4702  the current stack of nested blocks being parsed.
4703  error: The function to call with any errors found.
4704  """
4705  # If the line is empty or consists entirely of a comment, no need to
4706  # check it.
4707  line = clean_lines.elided[linenum]
4708  if not line:
4709  return
4710 
4711  match = _RE_PATTERN_INCLUDE.search(line)
4712  if match:
4713  CheckIncludeLine(filename, clean_lines, linenum, include_state, error)
4714  return
4715 
4716  # Reset include state across preprocessor directives. This is meant
4717  # to silence warnings for conditional includes.
4718  match = Match(r'^\s*#\s*(if|ifdef|ifndef|elif|else|endif)\b', line)
4719  if match:
4720  include_state.ResetSection(match.group(1))
4721 
4722  # Make Windows paths like Unix.
4723  fullname = os.path.abspath(filename).replace('\\', '/')
4724 
4725  # Perform other checks now that we are sure that this is not an include line
4726  CheckCasts(filename, clean_lines, linenum, error)
4727  CheckGlobalStatic(filename, clean_lines, linenum, error)
4728  CheckPrintf(filename, clean_lines, linenum, error)
4729 
4730  if IsHeaderExtension(file_extension):
4731  # TODO(unknown): check that 1-arg constructors are explicit.
4732  # How to tell it's a constructor?
4733  # (handled in CheckForNonStandardConstructs for now)
4734  # TODO(unknown): check that classes declare or disable copy/assign
4735  # (level 1 error)
4736  pass
4737 
4738  # Check if people are using the verboten C basic types. The only exception
4739  # we regularly allow is "unsigned short port" for port.
4740  if Search(r'\bshort port\b', line):
4741  if not Search(r'\bunsigned short port\b', line):
4742  error(filename, linenum, 'runtime/int', 4,
4743  'Use "unsigned short" for ports, not "short"')
4744  else:
4745  match = Search(r'\b(short|long(?! +double)|long long)\b', line)
4746  if match:
4747  error(filename, linenum, 'runtime/int', 4,
4748  'Use int16/int64/etc, rather than the C type %s' % match.group(1))
4749 
4750  # Check if some verboten operator overloading is going on
4751  # TODO(unknown): catch out-of-line unary operator&:
4752  # class X {};
4753  # int operator&(const X& x) { return 42; } // unary operator&
4754  # The trick is it's hard to tell apart from binary operator&:
4755  # class Y { int operator&(const Y& x) { return 23; } }; // binary operator&
4756  if Search(r'\boperator\s*&\s*\(\s*\)', line):
4757  error(filename, linenum, 'runtime/operator', 4,
4758  'Unary operator& is dangerous. Do not use it.')
4759 
4760  # Check for suspicious usage of "if" like
4761  # } if (a == b) {
4762  if Search(r'\}\s*if\s*\(', line):
4763  error(filename, linenum, 'readability/braces', 4,
4764  'Did you mean "else if"? If not, start a new line for "if".')
4765 
4766  # Check for potential format string bugs like printf(foo).
4767  # We constrain the pattern not to pick things like DocidForPrintf(foo).
4768  # Not perfect but it can catch printf(foo.c_str()) and printf(foo->c_str())
4769  # TODO(unknown): Catch the following case. Need to change the calling
4770  # convention of the whole function to process multiple lines to handle it.
4771  # printf(
4772  # boy_this_is_a_really_long_variable_that_cannot_fit_on_the_prev_line);
4773  printf_args = _GetTextInside(line, r'(?i)\b(string)?printf\s*\(')
4774  if printf_args:
4775  match = Match(r'([\w.\->()]+)$', printf_args)
4776  if match and match.group(1) != '__VA_ARGS__':
4777  function_name = re.search(r'\b((?:string)?printf)\s*\(',
4778  line, re.I).group(1)
4779  error(filename, linenum, 'runtime/printf', 4,
4780  'Potential format string bug. Do %s("%%s", %s) instead.'
4781  % (function_name, match.group(1)))
4782 
4783  # Check for potential memset bugs like memset(buf, sizeof(buf), 0).
4784  match = Search(r'memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)', line)
4785  if match and not Match(r"^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2)):
4786  error(filename, linenum, 'runtime/memset', 4,
4787  'Did you mean "memset(%s, 0, %s)"?'
4788  % (match.group(1), match.group(2)))
4789 
4790  if Search(r'\busing namespace\b', line):
4791  error(filename, linenum, 'build/namespaces', 5,
4792  'Do not use namespace using-directives. '
4793  'Use using-declarations instead.')
4794 
4795  # Detect variable-length arrays.
4796  match = Match(r'\s*(.+::)?(\w+) [a-z]\w*\[(.+)];', line)
4797  if (match and match.group(2) != 'return' and match.group(2) != 'delete' and
4798  match.group(3).find(']') == -1):
4799  # Split the size using space and arithmetic operators as delimiters.
4800  # If any of the resulting tokens are not compile time constants then
4801  # report the error.
4802  tokens = re.split(r'\s|\+|\-|\*|\/|<<|>>]', match.group(3))
4803  is_const = True
4804  skip_next = False
4805  for tok in tokens:
4806  if skip_next:
4807  skip_next = False
4808  continue
4809 
4810  if Search(r'sizeof\(.+\)', tok): continue
4811  if Search(r'arraysize\(\w+\)', tok): continue
4812 
4813  tok = tok.lstrip('(')
4814  tok = tok.rstrip(')')
4815  if not tok: continue
4816  if Match(r'\d+', tok): continue
4817  if Match(r'0[xX][0-9a-fA-F]+', tok): continue
4818  if Match(r'k[A-Z0-9]\w*', tok): continue
4819  if Match(r'(.+::)?k[A-Z0-9]\w*', tok): continue
4820  if Match(r'(.+::)?[A-Z][A-Z0-9_]*', tok): continue
4821  # A catch-all for tricky sizeof cases, including 'sizeof expression',
4822  # 'sizeof(*type)', 'sizeof(const type)', 'sizeof(struct StructName)'.
4823  # These require skipping the next token because we split on ' ' and '*'.
4824  if tok.startswith('sizeof'):
4825  skip_next = True
4826  continue
4827  is_const = False
4828  break
4829  if not is_const:
4830  error(filename, linenum, 'runtime/arrays', 1,
4831  'Do not use variable-length arrays. Use an appropriately named '
4832  "('k' followed by CamelCase) compile-time constant for the size.")
4833 
4834  # Check for use of unnamed namespaces in header files. Registration
4835  # macros are typically OK, so we allow use of "namespace {" on lines
4836  # that end with backslashes.
4837  if (IsHeaderExtension(file_extension)
4838  and Search(r'\bnamespace\s*{', line)
4839  and line[-1] != '\\'):
4840  error(filename, linenum, 'build/namespaces', 4,
4841  'Do not use unnamed namespaces in header files. See '
4842  'https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces'
4843  ' for more information.')
4844 
4845 
4846 def CheckGlobalStatic(filename, clean_lines, linenum, error):
4847  """Check for unsafe global or static objects.
4848 
4849  Args:
4850  filename: The name of the current file.
4851  clean_lines: A CleansedLines instance containing the file.
4852  linenum: The number of the line to check.
4853  error: The function to call with any errors found.
4854  """
4855  line = clean_lines.elided[linenum]
4856 
4857  # Match two lines at a time to support multiline declarations
4858  if linenum + 1 < clean_lines.NumLines() and not Search(r'[;({]', line):
4859  line += clean_lines.elided[linenum + 1].strip()
4860 
4861  # Check for people declaring static/global STL strings at the top level.
4862  # This is dangerous because the C++ language does not guarantee that
4863  # globals with constructors are initialized before the first access, and
4864  # also because globals can be destroyed when some threads are still running.
4865  # TODO(unknown): Generalize this to also find static unique_ptr instances.
4866  # TODO(unknown): File bugs for clang-tidy to find these.
4867  match = Match(
4868  r'((?:|static +)(?:|const +))(?::*std::)?string( +const)? +'
4869  r'([a-zA-Z0-9_:]+)\b(.*)',
4870  line)
4871 
4872  # Remove false positives:
4873  # - String pointers (as opposed to values).
4874  # string *pointer
4875  # const string *pointer
4876  # string const *pointer
4877  # string *const pointer
4878  #
4879  # - Functions and template specializations.
4880  # string Function<Type>(...
4881  # string Class<Type>::Method(...
4882  #
4883  # - Operators. These are matched separately because operator names
4884  # cross non-word boundaries, and trying to match both operators
4885  # and functions at the same time would decrease accuracy of
4886  # matching identifiers.
4887  # string Class::operator*()
4888  if (match and
4889  not Search(r'\bstring\b(\s+const)?\s*[\*\&]\s*(const\s+)?\w', line) and
4890  not Search(r'\boperator\W', line) and
4891  not Match(r'\s*(<.*>)?(::[a-zA-Z0-9_]+)*\s*\(([^"]|$)', match.group(4))):
4892  if Search(r'\bconst\b', line):
4893  error(filename, linenum, 'runtime/string', 4,
4894  'For a static/global string constant, use a C style string '
4895  'instead: "%schar%s %s[]".' %
4896  (match.group(1), match.group(2) or '', match.group(3)))
4897  else:
4898  error(filename, linenum, 'runtime/string', 4,
4899  'Static/global string variables are not permitted.')
4900 
4901  if (Search(r'\b([A-Za-z0-9_]*_)\(\1\)', line) or
4902  Search(r'\b([A-Za-z0-9_]*_)\(CHECK_NOTNULL\(\1\)\)', line)):
4903  error(filename, linenum, 'runtime/init', 4,
4904  'You seem to be initializing a member variable with itself.')
4905 
4906 
4907 def CheckPrintf(filename, clean_lines, linenum, error):
4908  """Check for printf related issues.
4909 
4910  Args:
4911  filename: The name of the current file.
4912  clean_lines: A CleansedLines instance containing the file.
4913  linenum: The number of the line to check.
4914  error: The function to call with any errors found.
4915  """
4916  line = clean_lines.elided[linenum]
4917 
4918  # When snprintf is used, the second argument shouldn't be a literal.
4919  match = Search(r'snprintf\s*\(([^,]*),\s*([0-9]*)\s*,', line)
4920  if match and match.group(2) != '0':
4921  # If 2nd arg is zero, snprintf is used to calculate size.
4922  error(filename, linenum, 'runtime/printf', 3,
4923  'If you can, use sizeof(%s) instead of %s as the 2nd arg '
4924  'to snprintf.' % (match.group(1), match.group(2)))
4925 
4926  # Check if some verboten C functions are being used.
4927  if Search(r'\bsprintf\s*\(', line):
4928  error(filename, linenum, 'runtime/printf', 5,
4929  'Never use sprintf. Use snprintf instead.')
4930  match = Search(r'\b(strcpy|strcat)\s*\(', line)
4931  if match:
4932  error(filename, linenum, 'runtime/printf', 4,
4933  'Almost always, snprintf is better than %s' % match.group(1))
4934 
4935 
4936 def IsDerivedFunction(clean_lines, linenum):
4937  """Check if current line contains an inherited function.
4938 
4939  Args:
4940  clean_lines: A CleansedLines instance containing the file.
4941  linenum: The number of the line to check.
4942  Returns:
4943  True if current line contains a function with "override"
4944  virt-specifier.
4945  """
4946  # Scan back a few lines for start of current function
4947  for i in xrange(linenum, max(-1, linenum - 10), -1):
4948  match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i])
4949  if match:
4950  # Look for "override" after the matching closing parenthesis
4951  line, _, closing_paren = CloseExpression(
4952  clean_lines, i, len(match.group(1)))
4953  return (closing_paren >= 0 and
4954  Search(r'\boverride\b', line[closing_paren:]))
4955  return False
4956 
4957 
4958 def IsOutOfLineMethodDefinition(clean_lines, linenum):
4959  """Check if current line contains an out-of-line method definition.
4960 
4961  Args:
4962  clean_lines: A CleansedLines instance containing the file.
4963  linenum: The number of the line to check.
4964  Returns:
4965  True if current line contains an out-of-line method definition.
4966  """
4967  # Scan back a few lines for start of current function
4968  for i in xrange(linenum, max(-1, linenum - 10), -1):
4969  if Match(r'^([^()]*\w+)\(', clean_lines.elided[i]):
4970  return Match(r'^[^()]*\w+::\w+\(', clean_lines.elided[i]) is not None
4971  return False
4972 
4973 
4974 def IsInitializerList(clean_lines, linenum):
4975  """Check if current line is inside constructor initializer list.
4976 
4977  Args:
4978  clean_lines: A CleansedLines instance containing the file.
4979  linenum: The number of the line to check.
4980  Returns:
4981  True if current line appears to be inside constructor initializer
4982  list, False otherwise.
4983  """
4984  for i in xrange(linenum, 1, -1):
4985  line = clean_lines.elided[i]
4986  if i == linenum:
4987  remove_function_body = Match(r'^(.*)\{\s*$', line)
4988  if remove_function_body:
4989  line = remove_function_body.group(1)
4990 
4991  if Search(r'\s:\s*\w+[({]', line):
4992  # A lone colon tends to indicate the start of a constructor
4993  # initializer list. It could also be a ternary operator, which
4994  # also tends to appear in constructor initializer lists as
4995  # opposed to parameter lists.
4996  return True
4997  if Search(r'\}\s*,\s*$', line):
4998  # A closing brace followed by a comma is probably the end of a
4999  # brace-initialized member in constructor initializer list.
5000  return True
5001  if Search(r'[{};]\s*$', line):
5002  # Found one of the following:
5003  # - A closing brace or semicolon, probably the end of the previous
5004  # function.
5005  # - An opening brace, probably the start of current class or namespace.
5006  #
5007  # Current line is probably not inside an initializer list since
5008  # we saw one of those things without seeing the starting colon.
5009  return False
5010 
5011  # Got to the beginning of the file without seeing the start of
5012  # constructor initializer list.
5013  return False
5014 
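# Editorial sketch (assumed snippet): for a constructor written as
#   MyClass::MyClass(int x)
#       : member_(x) {
# the ' : member_(' fragment is what the first branch of the heuristic above
# keys on, so lines in that region are treated as an initializer list.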
5015 
5016 def CheckForNonConstReference(filename, clean_lines, linenum,
5017  nesting_state, error):
5018  """Check for non-const references.
5019 
5020  Separate from CheckLanguage since it scans backwards from current
5021  line, instead of scanning forward.
5022 
5023  Args:
5024  filename: The name of the current file.
5025  clean_lines: A CleansedLines instance containing the file.
5026  linenum: The number of the line to check.
5027  nesting_state: A NestingState instance which maintains information about
5028  the current stack of nested blocks being parsed.
5029  error: The function to call with any errors found.
5030  """
5031  # Do nothing if there is no '&' on current line.
5032  line = clean_lines.elided[linenum]
5033  if '&' not in line:
5034  return
5035 
5036  # If a function is inherited, current function doesn't have much of
5037  # a choice, so any non-const references should not be blamed on
5038  # derived function.
5039  if IsDerivedFunction(clean_lines, linenum):
5040  return
5041 
5042  # Don't warn on out-of-line method definitions, as we would warn on the
5043  # in-line declaration, if it isn't marked with 'override'.
5044  if IsOutOfLineMethodDefinition(clean_lines, linenum):
5045  return
5046 
5047  # Long type names may be broken across multiple lines, usually in one
5048  # of these forms:
5049  # LongType
5050  # ::LongTypeContinued &identifier
5051  # LongType::
5052  # LongTypeContinued &identifier
5053  # LongType<
5054  # ...>::LongTypeContinued &identifier
5055  #
5056  # If we detected a type split across two lines, join the previous
5057  # line to current line so that we can match const references
5058  # accordingly.
5059  #
5060  # Note that this only scans back one line, since scanning back an
5061  # arbitrary number of lines would be expensive. If you have a type
5062  # that spans more than 2 lines, please use a typedef.
5063  if linenum > 1:
5064  previous = None
5065  if Match(r'\s*::(?:[\w<>]|::)+\s*&\s*\S', line):
5066  # previous_line\n + ::current_line
5067  previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+[\w<>])\s*$',
5068  clean_lines.elided[linenum - 1])
5069  elif Match(r'\s*[a-zA-Z_]([\w<>]|::)+\s*&\s*\S', line):
5070  # previous_line::\n + current_line
5071  previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+::)\s*$',
5072  clean_lines.elided[linenum - 1])
5073  if previous:
5074  line = previous.group(1) + line.lstrip()
5075  else:
5076  # Check for templated parameter that is split across multiple lines
5077  endpos = line.rfind('>')
5078  if endpos > -1:
5079  (_, startline, startpos) = ReverseCloseExpression(
5080  clean_lines, linenum, endpos)
5081  if startpos > -1 and startline < linenum:
5082  # Found the matching < on an earlier line, collect all
5083  # pieces up to current line.
5084  line = ''
5085  for i in xrange(startline, linenum + 1):
5086  line += clean_lines.elided[i].strip()
5087 
5088  # Check for non-const references in function parameters. A single '&' may
5089  # be found in the following places:
5090  # inside expression: binary & for bitwise AND
5091  # inside expression: unary & for taking the address of something
5092  # inside declarators: reference parameter
5093  # We will exclude the first two cases by checking that we are not inside a
5094  # function body, including one that was just introduced by a trailing '{'.
5095  # TODO(unknown): Doesn't account for 'catch(Exception& e)' [rare].
5096  if (nesting_state.previous_stack_top and
5097  not (isinstance(nesting_state.previous_stack_top, _ClassInfo) or
5098  isinstance(nesting_state.previous_stack_top, _NamespaceInfo))):
5099  # Not at toplevel, not within a class, and not within a namespace
5100  return
5101 
5102  # Avoid initializer lists. We only need to scan back from the
5103  # current line for something that starts with ':'.
5104  #
5105  # We don't need to check the current line, since the '&' would
5106  # appear inside the second set of parentheses on the current line as
5107  # opposed to the first set.
5108  if linenum > 0:
5109  for i in xrange(linenum - 1, max(0, linenum - 10), -1):
5110  previous_line = clean_lines.elided[i]
5111  if not Search(r'[),]\s*$', previous_line):
5112  break
5113  if Match(r'^\s*:\s+\S', previous_line):
5114  return
5115 
5116  # Avoid preprocessors
5117  if Search(r'\\\s*$', line):
5118  return
5119 
5120  # Avoid constructor initializer lists
5121  if IsInitializerList(clean_lines, linenum):
5122  return
5123 
5124  # We allow non-const references in a few standard places, like functions
5125  # called "swap()" or iostream operators like "<<" or ">>". Do not check
5126  # those function parameters.
5127  #
5128  # We also accept & in static_assert, which looks like a function but
5129  # it's actually a declaration expression.
5130  whitelisted_functions = (r'(?:[sS]wap(?:<\w:+>)?|'
5131  r'operator\s*[<>][<>]|'
5132  r'static_assert|COMPILE_ASSERT'
5133  r')\s*\(')
5134  if Search(whitelisted_functions, line):
5135  return
5136  elif not Search(r'\S+\([^)]*$', line):
5137  # Don't see a whitelisted function on this line. Actually we
5138  # didn't see any function name on this line, so this is likely a
5139  # multi-line parameter list. Try a bit harder to catch this case.
5140  for i in xrange(2):
5141  if (linenum > i and
5142  Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])):
5143  return
5144 
5145  decls = ReplaceAll(r'{[^}]*}', ' ', line) # exclude function body
5146  for parameter in re.findall(_RE_PATTERN_REF_PARAM, decls):
5147  if (not Match(_RE_PATTERN_CONST_REF_PARAM, parameter) and
5148  not Match(_RE_PATTERN_REF_STREAM_PARAM, parameter)):
5149  error(filename, linenum, 'runtime/references', 2,
5150  'Is this a non-const reference? '
5151  'If so, make const or use a pointer: ' +
5152  ReplaceAll(' *<', '<', parameter))
5153 
5154 
5155 def CheckCasts(filename, clean_lines, linenum, error):
5156  """Various cast related checks.
5157 
5158  Args:
5159  filename: The name of the current file.
5160  clean_lines: A CleansedLines instance containing the file.
5161  linenum: The number of the line to check.
5162  error: The function to call with any errors found.
5163  """
5164  line = clean_lines.elided[linenum]
5165 
5166  # Check to see if they're using a conversion function cast.
5167  # I just try to capture the most common basic types, though there are more.
5168  # Parameterless conversion functions, such as bool(), are allowed as they are
5169  # probably a member operator declaration or default constructor.
5170  match = Search(
5171  r'(\bnew\s+(?:const\s+)?|\S<\s*(?:const\s+)?)?\b'
5172  r'(int|float|double|bool|char|int32|uint32|int64|uint64)'
5173  r'(\([^)].*)', line)
5174  expecting_function = ExpectingFunctionArgs(clean_lines, linenum)
5175  if match and not expecting_function:
5176  matched_type = match.group(2)
5177 
5178  # matched_new_or_template is used to silence two false positives:
5179  # - New operators
5180  # - Template arguments with function types
5181  #
5182  # For template arguments, we match on types immediately following
5183  # an opening bracket without any spaces. This is a fast way to
5184  # silence the common case where the function type is the first
5185  # template argument. False negative with less-than comparison is
5186  # avoided because those operators are usually followed by a space.
5187  #
5188  # function<double(double)> // bracket + no space = false positive
5189  # value < double(42) // bracket + space = true positive
5190  matched_new_or_template = match.group(1)
5191 
5192  # Avoid arrays by looking for brackets that come after the closing
5193  # parenthesis.
5194  if Match(r'\([^()]+\)\s*\[', match.group(3)):
5195  return
5196 
5197  # Other things to ignore:
5198  # - Function pointers
5199  # - Casts to pointer types
5200  # - Placement new
5201  # - Alias declarations
5202  matched_funcptr = match.group(3)
5203  if (matched_new_or_template is None and
5204  not (matched_funcptr and
5205  (Match(r'\((?:[^() ]+::\s*\*\s*)?[^() ]+\)\s*\(',
5206  matched_funcptr) or
5207  matched_funcptr.startswith('(*)'))) and
5208  not Match(r'\s*using\s+\S+\s*=\s*' + matched_type, line) and
5209  not Search(r'new\(\S+\)\s*' + matched_type, line)):
5210  error(filename, linenum, 'readability/casting', 4,
5211  'Using deprecated casting style. '
5212  'Use static_cast<%s>(...) instead' %
5213  matched_type)
5214 
5215  if not expecting_function:
5216  CheckCStyleCast(filename, clean_lines, linenum, 'static_cast',
5217  r'\((int|float|double|bool|char|u?int(16|32|64))\)', error)
5218 
5219  # This doesn't catch all cases. Consider (const char * const)"hello".
5220  #
5221  # (char *) "foo" should always be a const_cast (reinterpret_cast won't
5222  # compile).
5223  if CheckCStyleCast(filename, clean_lines, linenum, 'const_cast',
5224  r'\((char\s?\*+\s?)\)\s*"', error):
5225  pass
5226  else:
5227  # Check pointer casts for other than string constants
5228  CheckCStyleCast(filename, clean_lines, linenum, 'reinterpret_cast',
5229  r'\((\w+\s?\*+\s?)\)', error)
5230 
5231  # In addition, we look for people taking the address of a cast. This
5232  # is dangerous -- casts can assign to temporaries, so the pointer doesn't
5233  # point where you think.
5234  #
5235  # Some non-identifier character is required before the '&' for the
5236  # expression to be recognized as a cast. These are casts:
5237  # expression = &static_cast<int*>(temporary());
5238  # function(&(int*)(temporary()));
5239  #
5240  # This is not a cast:
5241  # reference_type&(int* function_param);
5242  match = Search(
5243  r'(?:[^\w]&\(([^)*][^)]*)\)[\w(])|'
5244  r'(?:[^\w]&(static|dynamic|down|reinterpret)_cast\b)', line)
5245  if match:
5246  # Try a better error message when the & is bound to something
5247  # dereferenced by the casted pointer, as opposed to the casted
5248  # pointer itself.
5249  parenthesis_error = False
5250  match = Match(r'^(.*&(?:static|dynamic|down|reinterpret)_cast\b)<', line)
5251  if match:
5252  _, y1, x1 = CloseExpression(clean_lines, linenum, len(match.group(1)))
5253  if x1 >= 0 and clean_lines.elided[y1][x1] == '(':
5254  _, y2, x2 = CloseExpression(clean_lines, y1, x1)
5255  if x2 >= 0:
5256  extended_line = clean_lines.elided[y2][x2:]
5257  if y2 < clean_lines.NumLines() - 1:
5258  extended_line += clean_lines.elided[y2 + 1]
5259  if Match(r'\s*(?:->|\[)', extended_line):
5260  parenthesis_error = True
5261 
5262  if parenthesis_error:
5263  error(filename, linenum, 'readability/casting', 4,
5264  ('Are you taking an address of something dereferenced '
5265  'from a cast? Wrapping the dereferenced expression in '
5266  'parentheses will make the binding more obvious'))
5267  else:
5268  error(filename, linenum, 'runtime/casting', 4,
5269  ('Are you taking an address of a cast? '
5270  'This is dangerous: could be a temp var. '
5271  'Take the address before doing the cast, rather than after'))
5272 
5273 
5274 def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error):
5275  """Checks for a C-style cast by looking for the pattern.
5276 
5277  Args:
5278  filename: The name of the current file.
5279  clean_lines: A CleansedLines instance containing the file.
5280  linenum: The number of the line to check.
5281  cast_type: The string for the C++ cast to recommend. This is either
5282  reinterpret_cast, static_cast, or const_cast, depending.
5283  pattern: The regular expression used to find C-style casts.
5284  error: The function to call with any errors found.
5285 
5286  Returns:
5287  True if an error was emitted.
5288  False otherwise.
5289  """
5290  line = clean_lines.elided[linenum]
5291  match = Search(pattern, line)
5292  if not match:
5293  return False
5294 
5295  # Exclude lines with keywords that tend to look like casts
5296  context = line[0:match.start(1) - 1]
5297  if Match(r'.*\b(?:sizeof|alignof|alignas|[_A-Z][_A-Z0-9]*)\s*$', context):
5298  return False
5299 
5300  # Try expanding the current context to see if we are one level of
5301  # parentheses inside a macro.
5302  if linenum > 0:
5303  for i in xrange(linenum - 1, max(0, linenum - 5), -1):
5304  context = clean_lines.elided[i] + context
5305  if Match(r'.*\b[_A-Z][_A-Z0-9]*\s*\((?:\([^()]*\)|[^()])*$', context):
5306  return False
5307 
5308  # operator++(int) and operator--(int)
5309  if context.endswith(' operator++') or context.endswith(' operator--'):
5310  return False
5311 
5312  # A single unnamed argument for a function tends to look like an old-style cast.
5313  # If we see those, don't issue warnings for deprecated casts.
5314  remainder = line[match.end(0):]
5315  if Match(r'^\s*(?:;|const\b|throw\b|final\b|override\b|[=>{),]|->)',
5316  remainder):
5317  return False
5318 
5319  # At this point, all that should be left is actual casts.
5320  error(filename, linenum, 'readability/casting', 4,
5321  'Using C-style cast. Use %s<%s>(...) instead' %
5322  (cast_type, match.group(1)))
5323 
5324  return True
5325 
5326 
5327 def ExpectingFunctionArgs(clean_lines, linenum):
5328  """Checks whether where function type arguments are expected.
5329 
5330  Args:
5331  clean_lines: A CleansedLines instance containing the file.
5332  linenum: The number of the line to check.
5333 
5334  Returns:
5335  True if the line at 'linenum' is inside something that expects arguments
5336  of function types.
5337  """
5338  line = clean_lines.elided[linenum]
5339  return (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or
5340  (linenum >= 2 and
5341  (Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\((?:\S+,)?\s*$',
5342  clean_lines.elided[linenum - 1]) or
5343  Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\(\s*$',
5344  clean_lines.elided[linenum - 2]) or
5345  Search(r'\bstd::m?function\s*<\s*$',
5346  clean_lines.elided[linenum - 1]))))
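# Editorial sketch (assumed snippet): in
#   MOCK_METHOD2(Process, bool(const string& name, int count));
# 'bool(...)' is a function type rather than an old-style cast, so CheckCasts
# consults ExpectingFunctionArgs above and skips its cast warnings there.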
5347 
5348 
5349 _HEADERS_CONTAINING_TEMPLATES = (
5350  ('<deque>', ('deque',)),
5351  ('<functional>', ('unary_function', 'binary_function',
5352  'plus', 'minus', 'multiplies', 'divides', 'modulus',
5353  'negate',
5354  'equal_to', 'not_equal_to', 'greater', 'less',
5355  'greater_equal', 'less_equal',
5356  'logical_and', 'logical_or', 'logical_not',
5357  'unary_negate', 'not1', 'binary_negate', 'not2',
5358  'bind1st', 'bind2nd',
5359  'pointer_to_unary_function',
5360  'pointer_to_binary_function',
5361  'ptr_fun',
5362  'mem_fun_t', 'mem_fun', 'mem_fun1_t', 'mem_fun1_ref_t',
5363  'mem_fun_ref_t',
5364  'const_mem_fun_t', 'const_mem_fun1_t',
5365  'const_mem_fun_ref_t', 'const_mem_fun1_ref_t',
5366  'mem_fun_ref',
5367  )),
5368  ('<limits>', ('numeric_limits',)),
5369  ('<list>', ('list',)),
5370  ('<map>', ('map', 'multimap',)),
5371  ('<memory>', ('allocator', 'make_shared', 'make_unique', 'shared_ptr',
5372  'unique_ptr', 'weak_ptr')),
5373  ('<queue>', ('queue', 'priority_queue',)),
5374  ('<set>', ('set', 'multiset',)),
5375  ('<stack>', ('stack',)),
5376  ('<string>', ('char_traits', 'basic_string',)),
5377  ('<tuple>', ('tuple',)),
5378  ('<unordered_map>', ('unordered_map', 'unordered_multimap')),
5379  ('<unordered_set>', ('unordered_set', 'unordered_multiset')),
5380  ('<utility>', ('pair',)),
5381  ('<vector>', ('vector',)),
5382 
5383  # gcc extensions.
5384  # Note: std::hash is their hash, ::hash is our hash
5385  ('<hash_map>', ('hash_map', 'hash_multimap',)),
5386  ('<hash_set>', ('hash_set', 'hash_multiset',)),
5387  ('<slist>', ('slist',)),
5388  )
5389 
5390 _HEADERS_MAYBE_TEMPLATES = (
5391  ('<algorithm>', ('copy', 'max', 'min', 'min_element', 'sort',
5392  'transform',
5393  )),
5394  ('<utility>', ('forward', 'make_pair', 'move', 'swap')),
5395  )
5396 
5397 _RE_PATTERN_STRING = re.compile(r'\bstring\b')
5398 
5399 _re_pattern_headers_maybe_templates = []
5400 for _header, _templates in _HEADERS_MAYBE_TEMPLATES:
5401  for _template in _templates:
5402  # Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
5403  # type::max().
5404  _re_pattern_headers_maybe_templates.append(
5405  (re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
5406  _template,
5407  _header))
5408 
5409 # Other scripts may reach in and modify this pattern.
5410 _re_pattern_templates = []
5411 for _header, _templates in _HEADERS_CONTAINING_TEMPLATES:
5412  for _template in _templates:
5413  _re_pattern_templates.append(
5414  (re.compile(r'(<|\b)' + _template + r'\s*<'),
5415  _template + '<>',
5416  _header))
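# Editorial sketch: the '<vector>' entry above expands to roughly
#   (re.compile(r'(<|\b)vector\s*<'), 'vector<>', '<vector>')
# so a line containing 'std::vector<int>' makes CheckForIncludeWhatYouUse
# report 'Add #include <vector> for vector<>'.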
5417 
5418 
5419 def FilesBelongToSameModule(filename_cc, filename_h):
5420  """Check if these two filenames belong to the same module.
5421 
5422  The concept of a 'module' here is as follows:
5423  foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
5424  same 'module' if they are in the same directory.
5425  some/path/public/xyzzy and some/path/internal/xyzzy are also considered
5426  to belong to the same module here.
5427 
5428  If the filename_cc contains a longer path than the filename_h, for example,
5429  '/absolute/path/to/base/sysinfo.cc', and this file would include
5430  'base/sysinfo.h', this function also produces the prefix needed to open the
5431  header. This is used by the caller of this function to more robustly open the
5432  header file. We don't have access to the real include paths in this context,
5433  so we need this guesswork here.
5434 
5435  Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
5436  according to this implementation. Because of this, this function gives
5437  some false positives. This should be sufficiently rare in practice.
5438 
5439  Args:
5440  filename_cc: is the path for the .cc file
5441  filename_h: is the path for the header file
5442 
5443  Returns:
5444  Tuple with a bool and a string:
5445  bool: True if filename_cc and filename_h belong to the same module.
5446  string: the additional prefix needed to open the header file.
5447  """
5448 
5449  fileinfo = FileInfo(filename_cc)
5450  if not fileinfo.IsSource():
5451  return (False, '')
5452  filename_cc = filename_cc[:-len(fileinfo.Extension())]
5453  matched_test_suffix = Search(_TEST_FILE_SUFFIX, fileinfo.BaseName())
5454  if matched_test_suffix:
5455  filename_cc = filename_cc[:-len(matched_test_suffix.group(1))]
5456  filename_cc = filename_cc.replace('/public/', '/')
5457  filename_cc = filename_cc.replace('/internal/', '/')
5458 
5459  if not filename_h.endswith('.h'):
5460  return (False, '')
5461  filename_h = filename_h[:-len('.h')]
5462  if filename_h.endswith('-inl'):
5463  filename_h = filename_h[:-len('-inl')]
5464  filename_h = filename_h.replace('/public/', '/')
5465  filename_h = filename_h.replace('/internal/', '/')
5466 
5467  files_belong_to_same_module = filename_cc.endswith(filename_h)
5468  common_path = ''
5469  if files_belong_to_same_module:
5470  common_path = filename_cc[:-len(filename_h)]
5471  return files_belong_to_same_module, common_path
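# Editorial sketch (assumed paths):
#   FilesBelongToSameModule('/src/chrome/browser/ui/browser.cc',
#                           'chrome/browser/ui/browser.h')
# returns (True, '/src/'); the caller prepends '/src/' when opening the header.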
5472 
5473 
5474 def UpdateIncludeState(filename, include_dict, io=codecs):
5475  """Fill up the include_dict with new includes found from the file.
5476 
5477  Args:
5478  filename: the name of the header to read.
5479  include_dict: a dictionary in which the headers are inserted.
5480  io: The io factory to use to read the file. Provided for testability.
5481 
5482  Returns:
5483  True if a header was successfully added. False otherwise.
5484  """
5485  headerfile = None
5486  try:
5487  headerfile = io.open(filename, 'r', 'utf8', 'replace')
5488  except IOError:
5489  return False
5490  linenum = 0
5491  for line in headerfile:
5492  linenum += 1
5493  clean_line = CleanseComments(line)
5494  match = _RE_PATTERN_INCLUDE.search(clean_line)
5495  if match:
5496  include = match.group(2)
5497  include_dict.setdefault(include, linenum)
5498  return True
5499 
5500 
5501 def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
5502  io=codecs):
5503  """Reports for missing stl includes.
5504 
5505  This function will output warnings to make sure you are including the headers
5506  necessary for the stl containers and functions that you use. We only give one
5507  reason to include a header. For example, if you use both equal_to<> and
5508  less<> in a .h file, only one (the latter in the file) of these will be
5509  reported as a reason to include <functional>.
5510 
5511  Args:
5512  filename: The name of the current file.
5513  clean_lines: A CleansedLines instance containing the file.
5514  include_state: An _IncludeState instance.
5515  error: The function to call with any errors found.
5516  io: The IO factory to use to read the header file. Provided for unittest
5517  injection.
5518  """
5519  required = {} # A map of header name to linenumber and the template entity.
5520  # Example of required: { '<functional>': (1219, 'less<>') }
5521 
5522  for linenum in xrange(clean_lines.NumLines()):
5523  line = clean_lines.elided[linenum]
5524  if not line or line[0] == '#':
5525  continue
5526 
5527  # String is special -- it is a non-templatized type in STL.
5528  matched = _RE_PATTERN_STRING.search(line)
5529  if matched:
5530  # Don't warn about strings in non-STL namespaces:
5531  # (We check only the first match per line; good enough.)
5532  prefix = line[:matched.start()]
5533  if prefix.endswith('std::') or not prefix.endswith('::'):
5534  required['<string>'] = (linenum, 'string')
5535 
5536  for pattern, template, header in _re_pattern_headers_maybe_templates:
5537  if pattern.search(line):
5538  required[header] = (linenum, template)
5539 
5540  # The following check is just a speed-up; no semantics are changed.
5541  if '<' not in line: # Reduces CPU time by skipping lines without templates.
5542  continue
5543 
5544  for pattern, template, header in _re_pattern_templates:
5545  matched = pattern.search(line)
5546  if matched:
5547  # Don't warn about IWYU in non-STL namespaces:
5548  # (We check only the first match per line; good enough.)
5549  prefix = line[:matched.start()]
5550  if prefix.endswith('std::') or not prefix.endswith('::'):
5551  required[header] = (linenum, template)
5552 
5553  # The policy is that if you #include something in foo.h you don't need to
5554  # include it again in foo.cc. Here, we will look at possible includes.
5555  # Let's flatten the include_state include_list and copy it into a dictionary.
5556  include_dict = dict([item for sublist in include_state.include_list
5557  for item in sublist])
5558 
5559  # Did we find the header for this file (if any) and successfully load it?
5560  header_found = False
5561 
5562  # Use the absolute path so that matching works properly.
5563  abs_filename = FileInfo(filename).FullName()
5564 
5565  # For Emacs's flymake.
5566  # If cpplint is invoked from Emacs's flymake, a temporary file is generated
5567  # by flymake and that file name might end with '_flymake.cc'. In that case,
5568  # restore original file name here so that the corresponding header file can be
5569  # found.
5570  # e.g. If the file name is 'foo_flymake.cc', we should search for 'foo.h'
5571  # instead of 'foo_flymake.h'
5572  abs_filename = re.sub(r'_flymake\.cc$', '.cc', abs_filename)
5573 
5574  # include_dict is modified during iteration, so we iterate over a copy of
5575  # the keys.
5576  header_keys = list(include_dict.keys())
5577  for header in header_keys:
5578  (same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
5579  fullpath = common_path + header
5580  if same_module and UpdateIncludeState(fullpath, include_dict, io):
5581  header_found = True
5582 
5583  # If we can't find the header file for a .cc, assume it's because we don't
5584  # know where to look. In that case we'll give up as we're not sure they
5585  # didn't include it in the .h file.
5586  # TODO(unknown): Do a better job of finding .h files so we are confident that
5587  # not having the .h file means there isn't one.
5588  if filename.endswith('.cc') and not header_found:
5589  return
5590 
5591  # All the lines have been processed, report the errors found.
5592  for required_header_unstripped in required:
5593  template = required[required_header_unstripped][1]
5594  if required_header_unstripped.strip('<>"') not in include_dict:
5595  error(filename, required[required_header_unstripped][0],
5596  'build/include_what_you_use', 4,
5597  'Add #include ' + required_header_unstripped + ' for ' + template)
5598 
5599 
5600 _RE_PATTERN_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<')
5601 
5602 
5603 def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error):
5604  """Check that make_pair's template arguments are deduced.
5605 
5606  G++ 4.6 in C++11 mode fails badly if make_pair's template arguments are
5607  specified explicitly, and such use isn't intended in any case.
5608 
5609  Args:
5610  filename: The name of the current file.
5611  clean_lines: A CleansedLines instance containing the file.
5612  linenum: The number of the line to check.
5613  error: The function to call with any errors found.
5614  """
5615  line = clean_lines.elided[linenum]
5616  match = _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line)
5617  if match:
5618  error(filename, linenum, 'build/explicit_make_pair',
5619  4, # 4 = high confidence
5620  'For C++11-compatibility, omit template arguments from make_pair'
5621  ' OR use pair directly OR if appropriate, construct a pair directly')
5622 
5623 
5624 def CheckRedundantVirtual(filename, clean_lines, linenum, error):
5625  """Check if line contains a redundant "virtual" function-specifier.
5626 
5627  Args:
5628  filename: The name of the current file.
5629  clean_lines: A CleansedLines instance containing the file.
5630  linenum: The number of the line to check.
5631  error: The function to call with any errors found.
5632  """
5633  # Look for "virtual" on current line.
5634  line = clean_lines.elided[linenum]
5635  virtual = Match(r'^(.*)(\bvirtual\b)(.*)$', line)
5636  if not virtual: return
5637 
5638  # Ignore "virtual" keywords that are near access-specifiers. These
5639  # are only used in class base-specifiers and do not apply to member
5640  # functions.
5641  if (Search(r'\b(public|protected|private)\s+$', virtual.group(1)) or
5642  Match(r'^\s+(public|protected|private)\b', virtual.group(3))):
5643  return
5644 
5645  # Ignore the "virtual" keyword from virtual base classes. Usually
5646  # there is a colon on the same line in these cases (virtual base
5647  # classes are rare in google3 because multiple inheritance is rare).
5648  if Match(r'^.*[^:]:[^:].*$', line): return
5649 
5650  # Look for the next opening parenthesis. This is the start of the
5651  # parameter list (possibly on the next line shortly after virtual).
5652  # TODO(unknown): doesn't work if there are virtual functions with
5653  # decltype() or other things that use parentheses, but csearch suggests
5654  # that this is rare.
5655  end_col = -1
5656  end_line = -1
5657  start_col = len(virtual.group(2))
5658  for start_line in xrange(linenum, min(linenum + 3, clean_lines.NumLines())):
5659  line = clean_lines.elided[start_line][start_col:]
5660  parameter_list = Match(r'^([^(]*)\(', line)
5661  if parameter_list:
5662  # Match parentheses to find the end of the parameter list
5663  (_, end_line, end_col) = CloseExpression(
5664  clean_lines, start_line, start_col + len(parameter_list.group(1)))
5665  break
5666  start_col = 0
5667 
5668  if end_col < 0:
5669  return # Couldn't find end of parameter list, give up
5670 
5671  # Look for "override" or "final" after the parameter list
5672  # (possibly on the next few lines).
5673  for i in xrange(end_line, min(end_line + 3, clean_lines.NumLines())):
5674  line = clean_lines.elided[i][end_col:]
5675  match = Search(r'\b(override|final)\b', line)
5676  if match:
5677  error(filename, linenum, 'readability/inheritance', 4,
5678  ('"virtual" is redundant since function is '
5679  'already declared as "%s"' % match.group(1)))
5680 
5681  # Set end_col to check whole lines after we are done with the
5682  # first line.
5683  end_col = 0
5684  if Search(r'[^\w]\s*$', line):
5685  break
5686 
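# Editorial sketch (assumed declaration): for
#   virtual void OnClose() override;
# the check above reports that "virtual" is redundant because the function is
# already declared with "override".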
5687 
5688 def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error):
5689  """Check if line contains a redundant "override" or "final" virt-specifier.
5690 
5691  Args:
5692  filename: The name of the current file.
5693  clean_lines: A CleansedLines instance containing the file.
5694  linenum: The number of the line to check.
5695  error: The function to call with any errors found.
5696  """
5697  # Look for closing parenthesis nearby. We need one to confirm where
5698  # the declarator ends and where the virt-specifier starts to avoid
5699  # false positives.
5700  line = clean_lines.elided[linenum]
5701  declarator_end = line.rfind(')')
5702  if declarator_end >= 0:
5703  fragment = line[declarator_end:]
5704  else:
5705  if linenum > 1 and clean_lines.elided[linenum - 1].rfind(')') >= 0:
5706  fragment = line
5707  else:
5708  return
5709 
5710  # Check that at most one of "override" or "final" is present, not both
5711  if Search(r'\boverride\b', fragment) and Search(r'\bfinal\b', fragment):
5712  error(filename, linenum, 'readability/inheritance', 4,
5713  ('"override" is redundant since function is '
5714  'already declared as "final"'))
5715 
5716 
5717 
5718 
5719 # Returns true if we are at a new block, and it is directly
5720 # inside of a namespace.
5721 def IsBlockInNameSpace(nesting_state, is_forward_declaration):
5722  """Checks that the new block is directly in a namespace.
5723 
5724  Args:
5725  nesting_state: The _NestingState object that contains info about our state.
5726  is_forward_declaration: If the class is a forward declared class.
5727  Returns:
5728  Whether or not the new block is directly in a namespace.
5729  """
5730  if is_forward_declaration:
5731  if len(nesting_state.stack) >= 1 and (
5732  isinstance(nesting_state.stack[-1], _NamespaceInfo)):
5733  return True
5734  else:
5735  return False
5736 
5737  return (len(nesting_state.stack) > 1 and
5738  nesting_state.stack[-1].check_namespace_indentation and
5739  isinstance(nesting_state.stack[-2], _NamespaceInfo))
5740 
5741 
5742 def ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item,
5743  raw_lines_no_comments, linenum):
5744  """This method determines if we should apply our namespace indentation check.
5745 
5746  Args:
5747  nesting_state: The current nesting state.
5748  is_namespace_indent_item: If we just put a new class on the stack, True.
5749  If the top of the stack is not a class, or we did not recently
5750  add the class, False.
5751  raw_lines_no_comments: The lines without the comments.
5752  linenum: The current line number we are processing.
5753 
5754  Returns:
5755  True if we should apply our namespace indentation check. Currently, it
5756  only works for classes and namespaces inside of a namespace.
5757  """
5758 
5759  is_forward_declaration = IsForwardClassDeclaration(raw_lines_no_comments,
5760  linenum)
5761 
5762  if not (is_namespace_indent_item or is_forward_declaration):
5763  return False
5764 
5765  # If we are in a macro, we do not want to check the namespace indentation.
5766  if IsMacroDefinition(raw_lines_no_comments, linenum):
5767  return False
5768 
5769  return IsBlockInNameSpace(nesting_state, is_forward_declaration)
5770 
5771 
5772 # Call this method if the line is directly inside of a namespace.
5773 # If the line above is blank (excluding comments) or the start of
5774 # an inner namespace, it cannot be indented.
5775 def CheckItemIndentationInNamespace(filename, raw_lines_no_comments, linenum,
5776  error):
5777  line = raw_lines_no_comments[linenum]
5778  if Match(r'^\s+', line):
5779  error(filename, linenum, 'runtime/indentation_namespace', 4,
5780  'Do not indent within a namespace')
5781 
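# Illustrative example (not from the checked sources) of what the namespace
# indentation checks above report:
#
#   namespace foo {
#     class Bar;   // indented -> runtime/indentation_namespace
#   class Baz {};  // flush left -> accepted
#   }  // namespace foo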
5782 
5783 def ProcessLine(filename, file_extension, clean_lines, line,
5784  include_state, function_state, nesting_state, error,
5785  extra_check_functions=[]):
5786  """Processes a single line in the file.
5787 
5788  Args:
5789  filename: Filename of the file that is being processed.
5790  file_extension: The extension (dot not included) of the file.
5791  clean_lines: An array of strings, each representing a line of the file,
5792  with comments stripped.
5793  line: Number of line being processed.
5794  include_state: An _IncludeState instance in which the headers are inserted.
5795  function_state: A _FunctionState instance which counts function lines, etc.
5796  nesting_state: A NestingState instance which maintains information about
5797  the current stack of nested blocks being parsed.
5798  error: A callable to which errors are reported, which takes 4 arguments:
5799  filename, line number, error level, and message
5800  extra_check_functions: An array of additional check functions that will be
5801  run on each source line. Each function takes 4
5802  arguments: filename, clean_lines, line, error
5803  """
5804  raw_lines = clean_lines.raw_lines
5805  ParseNolintSuppressions(filename, raw_lines[line], line, error)
5806  nesting_state.Update(filename, clean_lines, line, error)
5807  CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line,
5808  error)
5809  if nesting_state.InAsmBlock(): return
5810  CheckForFunctionLengths(filename, clean_lines, line, function_state, error)
5811  CheckForMultilineCommentsAndStrings(filename, clean_lines, line, error)
5812  CheckStyle(filename, clean_lines, line, file_extension, nesting_state, error)
5813  CheckLanguage(filename, clean_lines, line, file_extension, include_state,
5814  nesting_state, error)
5815  CheckForNonConstReference(filename, clean_lines, line, nesting_state, error)
5816  CheckForNonStandardConstructs(filename, clean_lines, line,
5817  nesting_state, error)
5818  CheckVlogArguments(filename, clean_lines, line, error)
5819  CheckPosixThreading(filename, clean_lines, line, error)
5820  CheckInvalidIncrement(filename, clean_lines, line, error)
5821  CheckMakePairUsesDeduction(filename, clean_lines, line, error)
5822  CheckRedundantVirtual(filename, clean_lines, line, error)
5823  CheckRedundantOverrideOrFinal(filename, clean_lines, line, error)
5824  for check_fn in extra_check_functions:
5825  check_fn(filename, clean_lines, line, error)
5826 
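# A minimal sketch (hypothetical check name) of an entry suitable for the
# extra_check_functions argument of ProcessLine/ProcessFileData; each entry
# must accept (filename, clean_lines, line, error):
#
#   def CheckNoTodoExample(filename, clean_lines, line, error):
#     if 'TODO' in clean_lines.raw_lines[line]:
#       error(filename, line, 'readability/todo', 2, 'Unexpected TODO')
#
#   ProcessFileData('example.cc', 'cc', lines, Error,
#                   extra_check_functions=[CheckNoTodoExample])
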
5827 def FlagCxx11Features(filename, clean_lines, linenum, error):
5828  """Flag those c++11 features that we only allow in certain places.
5829 
5830  Args:
5831  filename: The name of the current file.
5832  clean_lines: A CleansedLines instance containing the file.
5833  linenum: The number of the line to check.
5834  error: The function to call with any errors found.
5835  """
5836  line = clean_lines.elided[linenum]
5837 
5838  include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line)
5839 
5840  # Flag unapproved C++ TR1 headers.
5841  if include and include.group(1).startswith('tr1/'):
5842  error(filename, linenum, 'build/c++tr1', 5,
5843  ('C++ TR1 headers such as <%s> are unapproved.') % include.group(1))
5844 
5845  # Flag unapproved C++11 headers.
5846  if include and include.group(1) in ('cfenv',
5847  'condition_variable',
5848  'fenv.h',
5849  'future',
5850  'mutex',
5851  'thread',
5852  'chrono',
5853  'ratio',
5854  'regex',
5855  'system_error',
5856  ):
5857  error(filename, linenum, 'build/c++11', 5,
5858  ('<%s> is an unapproved C++11 header.') % include.group(1))
5859 
5860  # The only place where we need to worry about C++11 keywords and library
5861  # features in preprocessor directives is in macro definitions.
5862  if Match(r'\s*#', line) and not Match(r'\s*#\s*define\b', line): return
5863 
5864  # These are classes and free functions. The classes are always
5865  # mentioned as std::*, but we only catch the free functions if
5866  # they're not found by ADL. They're alphabetical by header.
5867  for top_name in (
5868  # type_traits
5869  'alignment_of',
5870  'aligned_union',
5871  ):
5872  if Search(r'\bstd::%s\b' % top_name, line):
5873  error(filename, linenum, 'build/c++11', 5,
5874  ('std::%s is an unapproved C++11 class or function. Send c-style '
5875  'an example of where it would make your code more readable, and '
5876  'they may let you use it.') % top_name)
5877 
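# Illustrative triggers (not from the checked sources) for the checks above:
# '#include <tr1/unordered_map>' is flagged as build/c++tr1, while
# '#include <mutex>' or a use of 'std::alignment_of' is flagged as build/c++11.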
5878 
5879 def FlagCxx14Features(filename, clean_lines, linenum, error):
5880  """Flag those C++14 features that we restrict.
5881 
5882  Args:
5883  filename: The name of the current file.
5884  clean_lines: A CleansedLines instance containing the file.
5885  linenum: The number of the line to check.
5886  error: The function to call with any errors found.
5887  """
5888  line = clean_lines.elided[linenum]
5889 
5890  include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line)
5891 
5892  # Flag unapproved C++14 headers.
5893  if include and include.group(1) in ('scoped_allocator', 'shared_mutex'):
5894  error(filename, linenum, 'build/c++14', 5,
5895  ('<%s> is an unapproved C++14 header.') % include.group(1))
5896 
5897 
5898 def ProcessFileData(filename, file_extension, lines, error,
5899  extra_check_functions=[]):
5900  """Performs lint checks and reports any errors to the given error function.
5901 
5902  Args:
5903  filename: Filename of the file that is being processed.
5904  file_extension: The extension (dot not included) of the file.
5905  lines: An array of strings, each representing a line of the file, with the
5906  last element being empty if the file is terminated with a newline.
5907  error: A callable to which errors are reported, which takes 4 arguments:
5908  filename, line number, error level, and message
5909  extra_check_functions: An array of additional check functions that will be
5910  run on each source line. Each function takes 4
5911  arguments: filename, clean_lines, line, error
5912  """
5913  lines = (['// marker so line numbers and indices both start at 1'] + lines +
5914  ['// marker so line numbers end in a known way'])
5915 
5916  include_state = _IncludeState()
5917  function_state = _FunctionState()
5918  nesting_state = NestingState()
5919 
5920  ResetNolintSuppressions()
5921 
5922  CheckForCopyright(filename, lines, error)
5923  ProcessGlobalSuppresions(lines)
5924  RemoveMultiLineComments(filename, lines, error)
5925  clean_lines = CleansedLines(lines)
5926 
5927  if IsHeaderExtension(file_extension):
5928  CheckForHeaderGuard(filename, clean_lines, error)
5929 
5930  for line in xrange(clean_lines.NumLines()):
5931  ProcessLine(filename, file_extension, clean_lines, line,
5932  include_state, function_state, nesting_state, error,
5933  extra_check_functions)
5934  FlagCxx11Features(filename, clean_lines, line, error)
5935  nesting_state.CheckCompletedBlocks(filename, error)
5936 
5937  CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error)
5938 
5939  # Check that the .cc file has included its header if it exists.
5940  if _IsSourceExtension(file_extension):
5941  CheckHeaderFileIncluded(filename, include_state, error)
5942 
5943  # We check here rather than inside ProcessLine so that we see raw
5944  # lines rather than "cleaned" lines.
5945  CheckForBadCharacters(filename, lines, error)
5946 
5947  CheckForNewlineAtEOF(filename, lines, error)
5948 
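# A minimal usage sketch for ProcessFileData (the file content is
# illustrative); problems are routed through the module-level Error reporter:
#
#   lines = ['// Copyright 2020 Example', 'int main() { return 0; }']
#   ProcessFileData('example.cc', 'cc', lines, Error)
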
5949 def ProcessConfigOverrides(filename):
5950  """ Loads the configuration files and processes the config overrides.
5951 
5952  Args:
5953  filename: The name of the file being processed by the linter.
5954 
5955  Returns:
5956  False if the current |filename| should not be processed further.
5957  """
5958 
5959  abs_filename = os.path.abspath(filename)
5960  cfg_filters = []
5961  keep_looking = True
5962  while keep_looking:
5963  abs_path, base_name = os.path.split(abs_filename)
5964  if not base_name:
5965  break # Reached the root directory.
5966 
5967  cfg_file = os.path.join(abs_path, "CPPLINT.cfg")
5968  abs_filename = abs_path
5969  if not os.path.isfile(cfg_file):
5970  continue
5971 
5972  try:
5973  with open(cfg_file) as file_handle:
5974  for line in file_handle:
5975  line, _, _ = line.partition('#') # Remove comments.
5976  if not line.strip():
5977  continue
5978 
5979  name, _, val = line.partition('=')
5980  name = name.strip()
5981  val = val.strip()
5982  if name == 'set noparent':
5983  keep_looking = False
5984  elif name == 'filter':
5985  cfg_filters.append(val)
5986  elif name == 'exclude_files':
5987  # When matching exclude_files pattern, use the base_name of
5988  # the current file name or the directory name we are processing.
5989  # For example, if we are checking for lint errors in /foo/bar/baz.cc
5990  # and we found the .cfg file at /foo/CPPLINT.cfg, then the config
5991  # file's "exclude_files" filter is meant to be checked against "bar"
5992  # and not "baz" nor "bar/baz.cc".
5993  if base_name:
5994  pattern = re.compile(val)
5995  if pattern.match(base_name):
5996  if _cpplint_state.quiet:
5997  # Suppress "Ignoring file" warning when using --quiet.
5998  return False
5999  sys.stderr.write('Ignoring "%s": file excluded by "%s". '
6000  'File path component "%s" matches '
6001  'pattern "%s"\n' %
6002  (filename, cfg_file, base_name, val))
6003  return False
6004  elif name == 'linelength':
6005  global _line_length
6006  try:
6007  _line_length = int(val)
6008  except ValueError:
6009  sys.stderr.write('Line length must be numeric.')
6010  elif name == 'root':
6011  global _root
6012  # root directories are specified relative to CPPLINT.cfg dir.
6013  _root = os.path.join(os.path.dirname(cfg_file), val)
6014  elif name == 'headers':
6015  ProcessHppHeadersOption(val)
6016  else:
6017  sys.stderr.write(
6018  'Invalid configuration option (%s) in file %s\n' %
6019  (name, cfg_file))
6020 
6021  except IOError:
6022  sys.stderr.write(
6023  "Skipping config file '%s': Can't open for reading\n" % cfg_file)
6024  keep_looking = False
6025 
6026  # Apply all the accumulated filters in reverse order (top-level directory
6027  # config options having the least priority).
6028  for filter in reversed(cfg_filters):
6029  _AddFilters(filter)
6030 
6031  return True
6032 
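# An illustrative CPPLINT.cfg accepted by the parser above (values are
# examples, not defaults):
#
#   set noparent
#   filter=-build/include_order,+whitespace/braces
#   exclude_files=.*\.pb\.(h|cc)$
#   linelength=100
#   root=include
#   headers=hpp,hxx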
6033 
6034 def ProcessFile(filename, vlevel, extra_check_functions=[]):
6035  """Does google-lint on a single file.
6036 
6037  Args:
6038  filename: The name of the file to parse.
6039 
6040  vlevel: The level of errors to report. Every error of confidence
6041  >= verbose_level will be reported. 0 is a good default.
6042 
6043  extra_check_functions: An array of additional check functions that will be
6044  run on each source line. Each function takes 4
6045  arguments: filename, clean_lines, line, error
6046  """
6047 
6048  _SetVerboseLevel(vlevel)
6049  _BackupFilters()
6050  old_errors = _cpplint_state.error_count
6051 
6052  if not ProcessConfigOverrides(filename):
6053  _RestoreFilters()
6054  return
6055 
6056  lf_lines = []
6057  crlf_lines = []
6058  try:
6059  # Support the UNIX convention of using "-" for stdin. Note that
6060  # we are not opening the file with universal newline support
6061  # (which codecs doesn't support anyway), so the resulting lines do
6062  # contain trailing '\r' characters if we are reading a file that
6063  # has CRLF endings.
6064  # If after the split a trailing '\r' is present, it is removed
6065  # below.
6066  if filename == '-':
6067  lines = codecs.StreamReaderWriter(sys.stdin,
6068  codecs.getreader('utf8'),
6069  codecs.getwriter('utf8'),
6070  'replace').read().split('\n')
6071  else:
6072  lines = codecs.open(filename, 'r', 'utf8', 'replace').read().split('\n')
6073 
6074  # Remove trailing '\r'.
6075  # The -1 accounts for the extra trailing blank line we get from split()
6076  for linenum in range(len(lines) - 1):
6077  if lines[linenum].endswith('\r'):
6078  lines[linenum] = lines[linenum].rstrip('\r')
6079  crlf_lines.append(linenum + 1)
6080  else:
6081  lf_lines.append(linenum + 1)
6082 
6083  except IOError:
6084  sys.stderr.write(
6085  "Skipping input '%s': Can't open for reading\n" % filename)
6086  _RestoreFilters()
6087  return
6088 
6089  # Note, if no dot is found, this will give the entire filename as the ext.
6090  file_extension = filename[filename.rfind('.') + 1:]
6091 
6092  # When reading from stdin, the extension is unknown, so no cpplint tests
6093  # should rely on the extension.
6094  if filename != '-' and file_extension not in _valid_extensions:
6095  sys.stderr.write('Ignoring %s; not a valid file name '
6096  '(%s)\n' % (filename, ', '.join(_valid_extensions)))
6097  else:
6098  ProcessFileData(filename, file_extension, lines, Error,
6099  extra_check_functions)
6100 
6101  # If end-of-line sequences are a mix of LF and CR-LF, issue
6102  # warnings on the lines with CR.
6103  #
6104  # Don't issue any warnings if all lines are uniformly LF or CR-LF,
6105  # since critique can handle these just fine, and the style guide
6106  # doesn't dictate a particular end of line sequence.
6107  #
6108  # We can't depend on os.linesep to determine what the desired
6109  # end-of-line sequence should be, since that will return the
6110  # server-side end-of-line sequence.
6111  if lf_lines and crlf_lines:
6112  # Warn on every line with CR. An alternative approach might be to
6113  # check whether the file is mostly CRLF or just LF, and warn on the
6114  # minority; we bias toward LF here since most tools prefer LF.
6115  for linenum in crlf_lines:
6116  Error(filename, linenum, 'whitespace/newline', 1,
6117  'Unexpected \\r (^M) found; better to use only \\n')
6118 
6119  # Suppress printing anything if --quiet was passed unless the error
6120  # count has increased after processing this file.
6121  if not _cpplint_state.quiet or old_errors != _cpplint_state.error_count:
6122  sys.stdout.write('Done processing %s\n' % filename)
6123  _RestoreFilters()
6124 
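# A minimal call sketch, assuming cpplint is imported as a module:
#
#   import cpplint
#   cpplint.ProcessFile('example.cc', vlevel=1)
#
# This reads the file (or stdin when filename is '-'), applies CPPLINT.cfg
# overrides via ProcessConfigOverrides, and reports errors whose confidence
# is >= vlevel.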
6125 
6126 def PrintUsage(message):
6127  """Prints a brief usage string and exits, optionally with an error message.
6128 
6129  Args:
6130  message: The optional error message.
6131  """
6132  sys.stderr.write(_USAGE)
6133  if message:
6134  sys.exit('\nFATAL ERROR: ' + message)
6135  else:
6136  sys.exit(1)
6137 
6138 
6139 def PrintCategories():
6140  """Prints a list of all the error-categories used by error messages.
6141 
6142  These are the categories used to filter messages via --filter.
6143  """
6144  sys.stderr.write(''.join(' %s\n' % cat for cat in _ERROR_CATEGORIES))
6145  sys.exit(0)
6146 
6147 
6148 def ParseArguments(args):
6149  """Parses the command line arguments.
6150 
6151  This may set the output format and verbosity level as side-effects.
6152 
6153  Args:
6154  args: The command line arguments:
6155 
6156  Returns:
6157  The list of filenames to lint.
6158  """
6159  try:
6160  (opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=',
6161  'counting=',
6162  'filter=',
6163  'root=',
6164  'linelength=',
6165  'extensions=',
6166  'headers=',
6167  'quiet'])
6168  except getopt.GetoptError:
6169  PrintUsage('Invalid arguments.')
6170 
6171  verbosity = _VerboseLevel()
6172  output_format = _OutputFormat()
6173  filters = ''
6174  quiet = _Quiet()
6175  counting_style = ''
6176 
6177  for (opt, val) in opts:
6178  if opt == '--help':
6179  PrintUsage(None)
6180  elif opt == '--output':
6181  if val not in ('emacs', 'vs7', 'eclipse'):
6182  PrintUsage('The only allowed output formats are emacs, vs7 and eclipse.')
6183  output_format = val
6184  elif opt == '--quiet':
6185  quiet = True
6186  elif opt == '--verbose':
6187  verbosity = int(val)
6188  elif opt == '--filter':
6189  filters = val
6190  if not filters:
6191  PrintCategories()
6192  elif opt == '--counting':
6193  if val not in ('total', 'toplevel', 'detailed'):
6194  PrintUsage('Valid counting options are total, toplevel, and detailed')
6195  counting_style = val
6196  elif opt == '--root':
6197  global _root
6198  _root = val
6199  elif opt == '--linelength':
6200  global _line_length
6201  try:
6202  _line_length = int(val)
6203  except ValueError:
6204  PrintUsage('Line length must be digits.')
6205  elif opt == '--extensions':
6206  global _valid_extensions
6207  try:
6208  _valid_extensions = set(val.split(','))
6209  except ValueError:
6210  PrintUsage('Extensions must be comma separated list.')
6211  elif opt == '--headers':
6212  ProcessHppHeadersOption(val)
6213 
6214  if not filenames:
6215  PrintUsage('No files were specified.')
6216 
6217  _SetOutputFormat(output_format)
6218  _SetQuiet(quiet)
6219  _SetVerboseLevel(verbosity)
6220  _SetFilters(filters)
6221  _SetCountingStyle(counting_style)
6222 
6223  return filenames
6224 
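# A minimal sketch of how ParseArguments is consumed (arguments illustrative):
#
#   filenames = ParseArguments(['--verbose=3', '--quiet', 'foo.cc', 'bar.h'])
#   # -> ['foo.cc', 'bar.h']; verbosity, quiet mode, filters, etc. are stored
#   #    as module-level state via the _Set* helpers above.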
6225 
6226 def main():
6227  filenames = ParseArguments(sys.argv[1:])
6228 
6229  _cpplint_state.ResetErrorCounts()
6230  for filename in filenames:
6231  ProcessFile(filename, _cpplint_state.verbose_level)
6232  # If --quiet is passed, suppress printing error count unless there are errors.
6233  if not _cpplint_state.quiet or _cpplint_state.error_count > 0:
6234  _cpplint_state.PrintErrorCounts()
6235 
6236  sys.exit(_cpplint_state.error_count > 0)
6237 
6238 
6239 if __name__ == '__main__':
6240  main()