author     Refael Ackermann <refack@gmail.com>  2019-04-05 08:15:08 -0400
committer  Refael Ackermann <refack@gmail.com>  2019-04-08 08:30:18 -0400
commit     1302e0174a6d6917b7c2708bf1f9cb4841a041d0 (patch)
tree       a08594ee7b1b3ffa4c8bf6447971bf2308e210bd /tools
parent     112cc7c27551254aa2b17098fb774867f05ed0d9 (diff)
tools: bump cpplint.py to 1.4.4
Refs: https://github.com/cpplint/cpplint/releases/tag/1.4.4
PR-URL: https://github.com/nodejs/node/pull/27098
Reviewed-By: Joyee Cheung <joyeec9h3@gmail.com>
Reviewed-By: Daniel Bevenius <daniel.bevenius@gmail.com>
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/cpplint.py | 604
1 file changed, 332 insertions, 272 deletions
diff --git a/tools/cpplint.py b/tools/cpplint.py
index aed35f3e0e..81bc98b7ac 100755
--- a/tools/cpplint.py
+++ b/tools/cpplint.py
@@ -52,46 +52,38 @@ import re
import sre_compile
import string
import sys
+import sysconfig
import unicodedata
import xml.etree.ElementTree
# if empty, use defaults
-_header_extensions = set([])
-
-# if empty, use defaults
_valid_extensions = set([])
+__VERSION__ = '1.4.4'
-# Files with any of these extensions are considered to be
-# header files (and will undergo different style checks).
-# This set can be extended by using the --headers
-# option (also supported in CPPLINT.cfg)
-def GetHeaderExtensions():
- if not _header_extensions:
- return set(['h', 'hh', 'hpp', 'hxx', 'h++', 'cuh'])
- return _header_extensions
-
-# The allowed extensions for file names
-# This is set by --extensions flag
-def GetAllExtensions():
- if not _valid_extensions:
- return GetHeaderExtensions().union(set(['c', 'cc', 'cpp', 'cxx', 'c++', 'cu']))
- return _valid_extensions
-
-def GetNonHeaderExtensions():
- return GetAllExtensions().difference(GetHeaderExtensions())
+try:
+ xrange # Python 2
+except NameError:
+ # -- pylint: disable=redefined-builtin
+ xrange = range # Python 3
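The try/except above is the usual Python 2/3 compatibility shim for xrange; a minimal standalone sketch of the same pattern (illustrative, not part of the patch):

    # Bind xrange to range when the name does not exist (Python 3); on
    # Python 2 the bare name lookup succeeds and nothing changes.
    try:
        xrange
    except NameError:
        xrange = range

    assert list(xrange(3)) == [0, 1, 2]  # works on either interpreter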
_USAGE = """
Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit]
[--filter=-x,+y,...]
- [--counting=total|toplevel|detailed] [--repository=path]
- [--root=subdir] [--linelength=digits] [--recursive]
+ [--counting=total|toplevel|detailed] [--root=subdir]
+ [--repository=path]
+ [--linelength=digits] [--headers=x,y,...]
+ [--recursive]
[--exclude=path]
- [--headers=ext1,ext2]
[--extensions=hpp,cpp,...]
+ [--quiet]
+ [--version]
<file> [file] ...
+ Style checker for C/C++ source files.
+ This is a fork of the Google style checker with minor extensions.
+
The style guidelines this tries to follow are those in
https://google.github.io/styleguide/cppguide.html
@@ -111,10 +103,10 @@ Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit]
Flags:
output=emacs|eclipse|vs7|junit
- By default, the output is formatted to ease emacs parsing. Output
- compatible with eclipse (eclipse), Visual Studio (vs7), and JUnit
- XML parsers such as those used in Jenkins and Bamboo may also be
- used. Other formats are unsupported.
+ By default, the output is formatted to ease emacs parsing. Visual Studio
+ compatible output (vs7) may also be used. Further support exists for
+ eclipse (eclipse), and JUnit (junit). XML parsers such as those used
+ in Jenkins and Bamboo may also be used. Other formats are unsupported.
verbose=#
Specify a number 0-5 to restrict errors to certain verbosity levels.
@@ -122,8 +114,7 @@ Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit]
likely to be false positives.
quiet
- Suppress output other than linting errors, such as information about
- which files have been processed and excluded.
+ Don't print anything if no errors are found.
filter=-x,+y,...
Specify a comma-separated list of category-filters to apply: only
@@ -172,19 +163,21 @@ Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit]
Bob => SRC_CHROME_BROWSER_UI_BROWSER_H_
root=subdir
- The root directory used for deriving header guard CPP variables. This
- directory is relative to the top level directory of the repository which
- by default is determined by searching for a directory that contains .git,
- .hg, or .svn but can also be controlled with the --repository flag. If
- the specified directory does not exist, this flag is ignored.
+ The root directory used for deriving header guard CPP variable.
+ This directory is relative to the top level directory of the repository
+ which by default is determined by searching for a directory that contains
+ .git, .hg, or .svn but can also be controlled with the --repository flag.
+ If the specified directory does not exist, this flag is ignored.
Examples:
- Assuming that src is the top level directory of the repository, the
- header guard CPP variables for src/chrome/browser/ui/browser.h are:
+ Assuming that src is the top level directory of the repository (and
+ cwd=top/src), the header guard CPP variables for
+ src/chrome/browser/ui/browser.h are:
No flag => CHROME_BROWSER_UI_BROWSER_H_
--root=chrome => BROWSER_UI_BROWSER_H_
--root=chrome/browser => UI_BROWSER_H_
+ --root=.. => SRC_CHROME_BROWSER_UI_BROWSER_H_
linelength=digits
This is the allowed line length for the project. The default value is
@@ -216,13 +209,15 @@ Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit]
Examples:
--extensions=%s
- headers=extension,extension,...
- The allowed header extensions that cpplint will consider to be header files
- (by default, only files with extensions %s
- will be assumed to be headers)
+ headers=x,y,...
+ The header extensions that cpplint will treat as .h in checks. Values are
+ automatically added to --extensions list.
+ (by default, only files with extensions %s will be assumed to be headers)
Examples:
--headers=%s
+ --headers=hpp,hxx
+ --headers=hpp
cpplint.py supports per-directory configurations specified in CPPLINT.cfg
files. CPPLINT.cfg file can contain a number of key=value pairs.
@@ -233,6 +228,7 @@ Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit]
exclude_files=regex
linelength=80
root=subdir
+ headers=x,y,...
"set noparent" option prevents cpplint from traversing directory tree
upwards looking for more .cfg files in parent directories. This option
@@ -246,13 +242,16 @@ Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit]
a file name. If the expression matches, the file is skipped and not run
through the linter.
- "linelength" specifies the allowed line length for the project.
+ "linelength" allows to specify the allowed line length for the project.
The "root" option is similar in function to the --root flag (see example
- above).
+ above). Paths are relative to the directory of the CPPLINT.cfg.
+
+ The "headers" option is similar in function to the --headers flag
+ (see example above).
CPPLINT.cfg has an effect on files in the same directory and all
- subdirectories, unless overridden by a nested configuration file.
+ sub-directories, unless overridden by a nested configuration file.
Example file:
filter=-build/include_order,+build/include_alpha
@@ -261,11 +260,8 @@ Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit]
The above example disables build/include_order warning and enables
build/include_alpha as well as excludes all .cc from being
processed by linter, in the current directory (where the .cfg
- file is located) and all subdirectories.
-""" % (list(GetAllExtensions()),
- ','.join(list(GetAllExtensions())),
- GetHeaderExtensions(),
- ','.join(GetHeaderExtensions()))
+ file is located) and all sub-directories.
+"""
# We categorize each error message we print. Here are the categories.
# We want an explicit list so we can list them all in cpplint --filter=.
@@ -282,10 +278,9 @@ _ERROR_CATEGORIES = [
'build/forward_decl',
'build/header_guard',
'build/include',
+ 'build/include_subdir',
'build/include_alpha',
- 'build/include_inline',
'build/include_order',
- 'build/include_subdir',
'build/include_what_you_use',
'build/namespaces_literals',
'build/namespaces',
@@ -299,13 +294,11 @@ _ERROR_CATEGORIES = [
'readability/constructors',
'readability/fn_size',
'readability/inheritance',
- 'readability/pointer_notation',
'readability/multiline_comment',
'readability/multiline_string',
'readability/namespace',
'readability/nolint',
'readability/nul',
- 'readability/null_usage',
'readability/strings',
'readability/todo',
'readability/utf8',
@@ -356,13 +349,7 @@ _LEGACY_ERROR_CATEGORIES = [
# flag. By default all errors are on, so only add here categories that should be
# off by default (i.e., categories that must be enabled by the --filter= flags).
# All entries here should start with a '-' or '+', as in the --filter= flag.
-_DEFAULT_FILTERS = [
- '-build/include_alpha',
- '-build/include_subdir',
- '-build/include_what_you_use',
- '-legal/copyright',
- '-readability/nolint',
- ]
+_DEFAULT_FILTERS = ['-build/include_alpha']
# The default list of categories suppressed for C (not C++) files.
_DEFAULT_C_SUPPRESSED_CATEGORIES = [
@@ -635,12 +622,6 @@ _SEARCH_C_FILE = re.compile(r'\b(?:LINT_C_FILE|'
# Match string that indicates we're working on a Linux Kernel file.
_SEARCH_KERNEL_FILE = re.compile(r'\b(?:LINT_KERNEL_FILE)')
-_NULL_TOKEN_PATTERN = re.compile(r'\bNULL\b')
-
-_RIGHT_LEANING_POINTER_PATTERN = re.compile(r'[^=|(,\s><);&?:}]'
- r'(?<!(sizeof|return))'
- r'\s\*[a-zA-Z_][0-9a-zA-Z_]*')
-
_regexp_compile_cache = {}
# {str, set(int)}: a map from error categories to sets of linenumbers
@@ -650,6 +631,7 @@ _error_suppressions = {}
# The root directory used for deriving header guard CPP variable.
# This is set by --root flag.
_root = None
+_root_debug = False
# The top level repository directory. If set, _root is calculated relative to
# this directory instead of the directory containing version control artifacts.
@@ -659,7 +641,7 @@ _repository = None
# Files to exclude from linting. This is set by the --exclude flag.
_excludes = None
-# Whether to suppress PrintInfo messages
+# Whether to supress PrintInfo messages
_quiet = False
# The allowed line length of files.
@@ -667,19 +649,13 @@ _quiet = False
_line_length = 80
try:
- xrange(1, 0)
-except NameError:
- # -- pylint: disable=redefined-builtin
- xrange = range
-
-try:
unicode
except NameError:
# -- pylint: disable=redefined-builtin
basestring = unicode = str
try:
- long(2)
+ long
except NameError:
# -- pylint: disable=redefined-builtin
long = int
@@ -700,10 +676,38 @@ def unicode_escape_decode(x):
else:
return x
+# Treat all headers starting with 'h' equally: .h, .hpp, .hxx etc.
+# This is set by --headers flag.
+_hpp_headers = set(['h', 'hh', 'hpp', 'hxx', 'h++', 'cuh'])
+
# {str, bool}: a map from error categories to booleans which indicate if the
# category should be suppressed for every line.
_global_error_suppressions = {}
+def ProcessHppHeadersOption(val):
+ global _hpp_headers
+ try:
+ _hpp_headers = set(val.split(','))
+ # Automatically append to extensions list so it does not have to be set 2 times
+ _valid_extensions.update(_hpp_headers)
+ except ValueError:
+ PrintUsage('Header extensions must be comma separated list.')
+
+def IsHeaderExtension(file_extension):
+ return file_extension in _hpp_headers
+
+def GetHeaderExtensions():
+ return _hpp_headers or ['h']
+
+# The allowed extensions for file names
+# This is set by --extensions flag
+def GetAllExtensions():
+ if not _valid_extensions:
+ return GetHeaderExtensions().union(set(['c', 'cc', 'cpp', 'cxx', 'c++', 'cu']))
+ return _valid_extensions
+
+def GetNonHeaderExtensions():
+ return GetAllExtensions().difference(GetHeaderExtensions())
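A small sketch of the effect of the new --headers handling above: the header set is replaced and also merged into the allowed-extensions set, so it does not have to be passed twice (standalone illustration, not the patched module itself):

    # Simulate ProcessHppHeadersOption('hpp,hxx') with --extensions=c,cc,h set.
    _valid_extensions = set('c,cc,h'.split(','))
    _hpp_headers = set('hpp,hxx'.split(','))
    _valid_extensions.update(_hpp_headers)  # headers are auto-added

    assert _hpp_headers == {'hpp', 'hxx'}
    assert _valid_extensions == {'c', 'cc', 'h', 'hpp', 'hxx'}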
@@ -837,9 +841,9 @@ class _IncludeState(object):
# needs to move backwards, CheckNextIncludeOrder will raise an error.
_INITIAL_SECTION = 0
_MY_H_SECTION = 1
- _OTHER_H_SECTION = 2
- _C_SECTION = 3
- _CPP_SECTION = 4
+ _C_SECTION = 2
+ _CPP_SECTION = 3
+ _OTHER_H_SECTION = 4
_TYPE_NAMES = {
_C_SYS_HEADER: 'C system header',
@@ -851,9 +855,9 @@ class _IncludeState(object):
_SECTION_NAMES = {
_INITIAL_SECTION: "... nothing. (This can't be an error.)",
_MY_H_SECTION: 'a header this file implements',
- _OTHER_H_SECTION: 'other header',
_C_SECTION: 'C system header',
_CPP_SECTION: 'C++ system header',
+ _OTHER_H_SECTION: 'other header',
}
def __init__(self):
@@ -1000,6 +1004,7 @@ class _CppLintState(object):
self._filters_backup = self.filters[:]
self.counting = 'total' # In what way are we counting errors?
self.errors_by_category = {} # string to int dict storing error counts
+ self.quiet = False # Suppress non-error messages?

# output format:
# "emacs" - format that emacs can parse (default)
@@ -1017,6 +1022,12 @@ class _CppLintState(object):
"""Sets the output format for errors."""
self.output_format = output_format
+ def SetQuiet(self, quiet):
+ """Sets the module's quiet settings, and returns the previous setting."""
+ last_quiet = self.quiet
+ self.quiet = quiet
+ return last_quiet
+
def SetVerboseLevel(self, level):
"""Sets the module's verbosity, and returns the previous setting."""
last_verbose_level = self.verbose_level
@@ -1089,7 +1100,7 @@ class _CppLintState(object):
def PrintInfo(self, message):
if not _quiet and self.output_format != 'junit':
- sys.stderr.write(message)
+ sys.stdout.write(message)
def PrintError(self, message):
if self.output_format == 'junit':
@@ -1157,6 +1168,14 @@ def _SetOutputFormat(output_format):
"""Sets the module's output format."""
_cpplint_state.SetOutputFormat(output_format)
+def _Quiet():
+ """Return's the module's quiet setting."""
+ return _cpplint_state.quiet
+
+def _SetQuiet(quiet):
+ """Set the module's quiet status, and return previous setting."""
+ return _cpplint_state.SetQuiet(quiet)
+
def _VerboseLevel():
"""Returns the module's verbosity setting."""
@@ -1299,12 +1318,54 @@ class FileInfo(object):
locations won't see bogus errors.
"""
fullname = self.FullName()
- # XXX(bnoordhuis) Expects that cpplint.py lives in the tools/ directory.
- toplevel = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')).replace('\\', '/')
- toplevel = unicode_escape_decode(toplevel)
- prefix = os.path.commonprefix([fullname, toplevel])
- return fullname[len(prefix) + 1:]
- # End Node.js patch
+
+ if os.path.exists(fullname):
+ project_dir = os.path.dirname(fullname)
+
+ # If the user specified a repository path, it exists, and the file is
+ # contained in it, use the specified repository path
+ if _repository:
+ repo = FileInfo(_repository).FullName()
+ root_dir = project_dir
+ while os.path.exists(root_dir):
+ # allow case insensitive compare on Windows
+ if os.path.normcase(root_dir) == os.path.normcase(repo):
+ return os.path.relpath(fullname, root_dir).replace('\\', '/')
+ one_up_dir = os.path.dirname(root_dir)
+ if one_up_dir == root_dir:
+ break
+ root_dir = one_up_dir
+
+ if os.path.exists(os.path.join(project_dir, ".svn")):
+ # If there's a .svn file in the current directory, we recursively look
+ # up the directory tree for the top of the SVN checkout
+ root_dir = project_dir
+ one_up_dir = os.path.dirname(root_dir)
+ while os.path.exists(os.path.join(one_up_dir, ".svn")):
+ root_dir = os.path.dirname(root_dir)
+ one_up_dir = os.path.dirname(one_up_dir)
+
+ prefix = os.path.commonprefix([root_dir, project_dir])
+ return fullname[len(prefix) + 1:]
+
+ # Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by
+ # searching up from the current path.
+ root_dir = current_dir = os.path.dirname(fullname)
+ while current_dir != os.path.dirname(current_dir):
+ if (os.path.exists(os.path.join(current_dir, ".git")) or
+ os.path.exists(os.path.join(current_dir, ".hg")) or
+ os.path.exists(os.path.join(current_dir, ".svn"))):
+ root_dir = current_dir
+ current_dir = os.path.dirname(current_dir)
+
+ if (os.path.exists(os.path.join(root_dir, ".git")) or
+ os.path.exists(os.path.join(root_dir, ".hg")) or
+ os.path.exists(os.path.join(root_dir, ".svn"))):
+ prefix = os.path.commonprefix([root_dir, project_dir])
+ return fullname[len(prefix) + 1:]
+
+ # Don't know what to do; header guard warnings may be wrong...
+ return fullname
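The fallback above walks parent directories looking for a version-control marker; a self-contained sketch of that upward search (the function name is illustrative, not part of cpplint):

    import os

    def find_vcs_root(start_dir):
        """Return the nearest ancestor containing .git, .hg, or .svn, else None."""
        current = os.path.abspath(start_dir)
        while True:
            if any(os.path.exists(os.path.join(current, marker))
                   for marker in ('.git', '.hg', '.svn')):
                return current
            parent = os.path.dirname(current)
            if parent == current:  # reached the filesystem root
                return None
            current = parent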
def Split(self):
"""Splits the file into the directory, basename, and extension.
@@ -1390,14 +1451,14 @@ def Error(filename, linenum, category, confidence, message):
if _ShouldPrintError(category, confidence, linenum):
_cpplint_state.IncrementErrorCount(category)
if _cpplint_state.output_format == 'vs7':
- _cpplint_state.PrintError('%s(%s): warning: %s [%s] [%d]\n' % (
- filename, linenum, message, category, confidence))
+ _cpplint_state.PrintError('%s(%s): error cpplint: [%s] %s [%d]\n' % (
+ filename, linenum, category, message, confidence))
elif _cpplint_state.output_format == 'eclipse':
sys.stderr.write('%s:%s: warning: %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence))
elif _cpplint_state.output_format == 'junit':
- _cpplint_state.AddJUnitFailure(filename, linenum, message, category,
- confidence)
+ _cpplint_state.AddJUnitFailure(filename, linenum, message, category,
+ confidence)
else:
final_message = '%s:%s: %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence)
@@ -1913,7 +1974,7 @@ def CheckForCopyright(filename, lines, error):
# We'll say it should occur by line 10. Don't forget there's a
# dummy line at the front.
- for line in range(1, min(len(lines), 11)):
+ for line in xrange(1, min(len(lines), 11)):
if re.search(r'Copyright', lines[line], re.I): break
else: # means no copyright line was found
error(filename, 0, 'legal/copyright', 5,
@@ -1936,6 +1997,30 @@ def GetIndentLevel(line):
else:
return 0
+def PathSplitToList(path):
+ """Returns the path split into a list by the separator.
+
+ Args:
+ path: An absolute or relative path (e.g. '/a/b/c/' or '../a')
+
+ Returns:
+ A list of path components (e.g. ['a', 'b', 'c']).
+ """
+ lst = []
+ while True:
+ (head, tail) = os.path.split(path)
+ if head == path: # absolute paths end
+ lst.append(head)
+ break
+ if tail == path: # relative paths end
+ lst.append(tail)
+ break
+
+ path = head
+ lst.append(tail)
+
+ lst.reverse()
+ return lst
def GetHeaderGuardCPPVariable(filename):
"""Returns the CPP variable that should be used as a header guard.
@@ -1958,11 +2043,58 @@ def GetHeaderGuardCPPVariable(filename):
fileinfo = FileInfo(filename)
file_path_from_root = fileinfo.RepositoryName()
- if _root:
- # Convert root path to unix format because file_path_from_root is also
- # in that format and they wouldn't match otherwise on Windows machines
- root = os.path.normpath(_root).replace('\\', '/')
- file_path_from_root = re.sub('^' + root + '/', '', file_path_from_root)
+
+ def FixupPathFromRoot():
+ if _root_debug:
+ sys.stderr.write("\n_root fixup, _root = '%s', repository name = '%s'\n"
+ % (_root, fileinfo.RepositoryName()))
+
+ # Process the file path with the --root flag if it was set.
+ if not _root:
+ if _root_debug:
+ sys.stderr.write("_root unspecified\n")
+ return file_path_from_root
+
+ def StripListPrefix(lst, prefix):
+ # f(['x', 'y'], ['w, z']) -> None (not a valid prefix)
+ if lst[:len(prefix)] != prefix:
+ return None
+ # f(['a, 'b', 'c', 'd'], ['a', 'b']) -> ['c', 'd']
+ return lst[(len(prefix)):]
+
+ # root behavior:
+ # --root=subdir , lstrips subdir from the header guard
+ maybe_path = StripListPrefix(PathSplitToList(file_path_from_root),
+ PathSplitToList(_root))
+
+ if _root_debug:
+ sys.stderr.write(("_root lstrip (maybe_path=%s, file_path_from_root=%s," +
+ " _root=%s)\n") % (maybe_path, file_path_from_root, _root))
+
+ if maybe_path:
+ return os.path.join(*maybe_path)
+
+ # --root=.. , will prepend the outer directory to the header guard
+ full_path = fileinfo.FullName()
+ root_abspath = os.path.abspath(_root)
+
+ maybe_path = StripListPrefix(PathSplitToList(full_path),
+ PathSplitToList(root_abspath))
+
+ if _root_debug:
+ sys.stderr.write(("_root prepend (maybe_path=%s, full_path=%s, " +
+ "root_abspath=%s)\n") % (maybe_path, full_path, root_abspath))
+
+ if maybe_path:
+ return os.path.join(*maybe_path)
+
+ if _root_debug:
+ sys.stderr.write("_root ignore, returning %s\n" % (file_path_from_root))
+
+ # --root=FAKE_DIR is ignored
+ return file_path_from_root
+
+ file_path_from_root = FixupPathFromRoot()
return re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_'
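To make the --root fixup concrete, a short worked sketch of the prefix strip it performs before the guard is upper-cased (the path and root value mirror the usage examples earlier in the file):

    import os
    import re

    def strip_list_prefix(lst, prefix):
        # None when the prefix does not match; otherwise the remaining components.
        return lst[len(prefix):] if lst[:len(prefix)] == prefix else None

    parts = 'chrome/browser/ui/browser.h'.split('/')
    rest = strip_list_prefix(parts, ['chrome'])          # --root=chrome
    guard = re.sub(r'[^a-zA-Z0-9]', '_', os.path.join(*rest)).upper() + '_'
    assert guard == 'BROWSER_UI_BROWSER_H_'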
@@ -2078,22 +2210,22 @@ def CheckHeaderFileIncluded(filename, include_state, error):
return
for ext in GetHeaderExtensions():
- basefilename = filename[0:len(filename) - len(fileinfo.Extension())]
- headerfile = basefilename + '.' + ext
- if not os.path.exists(headerfile):
- continue
- headername = FileInfo(headerfile).RepositoryName()
- first_include = None
- for section_list in include_state.include_list:
- for f in section_list:
- if headername in f[0] or f[0] in headername:
- return
- if not first_include:
- first_include = f[1]
+ basefilename = filename[0:len(filename) - len(fileinfo.Extension())]
+ headerfile = basefilename + '.' + ext
+ if not os.path.exists(headerfile):
+ continue
+ headername = FileInfo(headerfile).RepositoryName()
+ first_include = None
+ for section_list in include_state.include_list:
+ for f in section_list:
+ if headername in f[0] or f[0] in headername:
+ return
+ if not first_include:
+ first_include = f[1]
- error(filename, first_include, 'build/include', 5,
- '%s should include its header file %s' % (fileinfo.RepositoryName(),
- headername))
+ error(filename, first_include, 'build/include', 5,
+ '%s should include its header file %s' % (fileinfo.RepositoryName(),
+ headername))
def CheckForBadCharacters(filename, lines, error):
@@ -2121,21 +2253,6 @@ def CheckForBadCharacters(filename, lines, error):
error(filename, linenum, 'readability/nul', 5, 'Line contains NUL byte.')
-def CheckInlineHeader(filename, include_state, error):
- """Logs an error if both a header and its inline variant are included."""
-
- all_headers = dict(item for sublist in include_state.include_list
- for item in sublist)
- bad_headers = set('%s.h' % name[:-6] for name in all_headers.keys()
- if name.endswith('-inl.h'))
- bad_headers &= set(all_headers.keys())
-
- for name in bad_headers:
- err = '%s includes both %s and %s-inl.h' % (filename, name, name)
- linenum = all_headers[name]
- error(filename, linenum, 'build/include_inline', 5, err)
-
-
def CheckForNewlineAtEOF(filename, lines, error):
"""Logs an error if there is no newline char at the end of the file.
@@ -2981,7 +3098,8 @@ def CheckForNonStandardConstructs(filename, clean_lines, linenum,
# Look for single-argument constructors that aren't marked explicit.
# Technically a valid construct, but against style.
explicit_constructor_match = Match(
- r'\s+(?:inline\s+)?(explicit\s+)?(?:inline\s+)?%s\s*'
+ r'\s+(?:(?:inline|constexpr)\s+)*(explicit\s+)?'
+ r'(?:(?:inline|constexpr)\s+)*%s\s*'
r'\(((?:[^()]|\([^()]*\))*)\)'
% re.escape(base_classname),
line)
@@ -3192,7 +3310,7 @@ def CheckForFunctionLengths(filename, clean_lines, linenum,
if starting_func:
body_found = False
- for start_linenum in range(linenum, clean_lines.NumLines()):
+ for start_linenum in xrange(linenum, clean_lines.NumLines()):
start_line = lines[start_linenum]
joined_line += ' ' + start_line.lstrip()
if Search(r'(;|})', start_line): # Declarations and trivial functions
@@ -3277,36 +3395,6 @@ def CheckComment(line, filename, linenum, next_line_start, error):
'Should have a space between // and comment')
-def CheckAccess(filename, clean_lines, linenum, nesting_state, error):
- """Checks for improper use of DISALLOW* macros.
-
- Args:
- filename: The name of the current file.
- clean_lines: A CleansedLines instance containing the file.
- linenum: The number of the line to check.
- nesting_state: A NestingState instance which maintains information about
- the current stack of nested blocks being parsed.
- error: The function to call with any errors found.
- """
- line = clean_lines.elided[linenum] # get rid of comments and strings
-
- matched = Match((r'\s*(DISALLOW_COPY_AND_ASSIGN|'
- r'DISALLOW_IMPLICIT_CONSTRUCTORS)'), line)
- if not matched:
- return
- if nesting_state.stack and isinstance(nesting_state.stack[-1], _ClassInfo):
- if nesting_state.stack[-1].access != 'private':
- error(filename, linenum, 'readability/constructors', 3,
- '%s must be in the private: section' % matched.group(1))
-
- else:
- # Found DISALLOW* macro outside a class declaration, or perhaps it
- # was used inside a function when it should have been part of the
- # class declaration. We could issue a warning here, but it
- # probably resulted in a compiler error already.
- pass
-
-
def CheckSpacing(filename, clean_lines, linenum, nesting_state, error):
"""Checks for the correctness of various spacing issues in the code.
@@ -4413,49 +4501,6 @@ def CheckAltTokens(filename, clean_lines, linenum, error):
'Use operator %s instead of %s' % (
_ALT_TOKEN_REPLACEMENT[match.group(1)], match.group(1)))
-def CheckNullTokens(filename, clean_lines, linenum, error):
- """Check NULL usage.
-
- Args:
- filename: The name of the current file.
- clean_lines: A CleansedLines instance containing the file.
- linenum: The number of the line to check.
- error: The function to call with any errors found.
- """
- line = clean_lines.elided[linenum]
-
- # Avoid preprocessor lines
- if Match(r'^\s*#', line):
- return
-
- if line.find('/*') >= 0 or line.find('*/') >= 0:
- return
-
- for match in _NULL_TOKEN_PATTERN.finditer(line):
- error(filename, linenum, 'readability/null_usage', 2,
- 'Use nullptr instead of NULL')
-
-def CheckLeftLeaningPointer(filename, clean_lines, linenum, error):
- """Check for left-leaning pointer placement.
-
- Args:
- filename: The name of the current file.
- clean_lines: A CleansedLines instance containing the file.
- linenum: The number of the line to check.
- error: The function to call with any errors found.
- """
- line = clean_lines.elided[linenum]
-
- # Avoid preprocessor lines
- if Match(r'^\s*#', line):
- return
-
- if '/*' in line or '*/' in line:
- return
-
- for match in _RIGHT_LEANING_POINTER_PATTERN.finditer(line):
- error(filename, linenum, 'readability/pointer_notation', 2,
- 'Use left leaning pointer instead of right leaning')
def GetLineWidth(line):
"""Determines the width of the line in column positions.
@@ -4473,6 +4518,16 @@ def GetLineWidth(line):
if unicodedata.east_asian_width(uc) in ('W', 'F'):
width += 2
elif not unicodedata.combining(uc):
+ # Issue 337
+ # https://mail.python.org/pipermail/python-list/2012-August/628809.html
+ if (sys.version_info.major, sys.version_info.minor) <= (3, 2):
+ # https://github.com/python/cpython/blob/2.7/Include/unicodeobject.h#L81
+ is_wide_build = sysconfig.get_config_var("Py_UNICODE_SIZE") >= 4
+ # https://github.com/python/cpython/blob/2.7/Objects/unicodeobject.c#L564
+ is_low_surrogate = 0xDC00 <= ord(uc) <= 0xDFFF
+ if not is_wide_build and is_low_surrogate:
+ width -= 1
+
width += 1
return width
else:
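A brief illustration of the column-width rule applied above, where wide East Asian characters count as two columns (a minimal sketch that omits the narrow-build surrogate correction):

    import unicodedata

    def line_width(line):
        width = 0
        for ch in line:
            if unicodedata.east_asian_width(ch) in ('W', 'F'):
                width += 2               # wide / fullwidth characters
            elif not unicodedata.combining(ch):
                width += 1               # everything else except combining marks
        return width

    assert line_width('abc') == 3
    assert line_width(u'\u4e2d\u6587') == 4   # two CJK characters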
@@ -4508,10 +4563,6 @@ def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state,
error(filename, linenum, 'whitespace/tab', 1,
'Tab found; better to use spaces')
- if line.find('template<') != -1:
- error(filename, linenum, 'whitespace/template', 1,
- 'Leave a single space after template, as in `template <...>`')
-
# One or three blank spaces at the beginning of the line is weird; it's
# hard to reconcile that with 2-space indents.
# NOTE: here are the conditions rob pike used for his tests. Mine aren't
@@ -4550,7 +4601,7 @@ def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state,
# Check if the line is a header guard.
is_header_guard = False
- if file_extension in GetHeaderExtensions():
+ if IsHeaderExtension(file_extension):
cppvar = GetHeaderGuardCPPVariable(filename)
if (line.startswith('#ifndef %s' % cppvar) or
line.startswith('#define %s' % cppvar) or
@@ -4596,7 +4647,6 @@ def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state,
CheckBraces(filename, clean_lines, linenum, error)
CheckTrailingSemicolon(filename, clean_lines, linenum, error)
CheckEmptyBlockBody(filename, clean_lines, linenum, error)
- CheckAccess(filename, clean_lines, linenum, nesting_state, error)
CheckSpacing(filename, clean_lines, linenum, nesting_state, error)
CheckOperatorSpacing(filename, clean_lines, linenum, error)
CheckParenthesisSpacing(filename, clean_lines, linenum, error)
@@ -4605,8 +4655,6 @@ def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state,
CheckSpacingForFunctionCall(filename, clean_lines, linenum, error)
CheckCheck(filename, clean_lines, linenum, error)
CheckAltTokens(filename, clean_lines, linenum, error)
- CheckNullTokens(filename, clean_lines, linenum, error)
- CheckLeftLeaningPointer(filename, clean_lines, linenum, error)
classinfo = nesting_state.InnermostClass()
if classinfo:
CheckSectionSpacing(filename, clean_lines, classinfo, linenum, error)
@@ -4681,7 +4729,7 @@ def _ClassifyInclude(fileinfo, include, is_system):
# Headers with C++ extensions shouldn't be considered C system headers
if is_system and os.path.splitext(include)[1] in ['.hpp', '.hxx', '.h++']:
- is_system = False
+ is_system = False
if is_system:
if is_cpp_h:
@@ -4771,10 +4819,11 @@ def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
include_state.include_list[-1].append((include, linenum))
# We want to ensure that headers appear in the right order:
- # 1) for foo.cc, foo.h
- # 2) other project headers
- # 3) c system files
- # 4) cpp system files
+ # 1) for foo.cc, foo.h (preferred location)
+ # 2) c system files
+ # 3) cpp system files
+ # 4) for foo.cc, foo.h (deprecated location)
+ # 5) other google headers
#
# We classify each include statement as one of those 5 types
# using a number of techniques. The include_state object keeps
@@ -4920,7 +4969,7 @@ def CheckLanguage(filename, clean_lines, linenum, file_extension,
CheckGlobalStatic(filename, clean_lines, linenum, error)
CheckPrintf(filename, clean_lines, linenum, error)
- if file_extension in GetHeaderExtensions():
+ if IsHeaderExtension(file_extension):
# TODO(unknown): check that 1-arg constructors are explicit.
# How to tell it's a constructor?
# (handled in CheckForNonStandardConstructs for now)
@@ -5032,7 +5081,7 @@ def CheckLanguage(filename, clean_lines, linenum, file_extension,
# Check for use of unnamed namespaces in header files. Registration
# macros are typically OK, so we allow use of "namespace {" on lines
# that end with backslashes.
- if (file_extension in GetHeaderExtensions()
+ if (IsHeaderExtension(file_extension)
and Search(r'\bnamespace\s*{', line)
and line[-1] != '\\'):
error(filename, linenum, 'build/namespaces', 4,
@@ -5598,7 +5647,7 @@ _re_pattern_headers_maybe_templates = []
for _header, _templates in _HEADERS_MAYBE_TEMPLATES:
for _template in _templates:
# Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
- # type::max().
+ # 'type::max()'.
_re_pattern_headers_maybe_templates.append(
(re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
_template,
@@ -5648,7 +5697,7 @@ def FilesBelongToSameModule(filename_cc, filename_h):
return (False, '')
fileinfo_h = FileInfo(filename_h)
- if not fileinfo_h.Extension().lstrip('.') in GetHeaderExtensions():
+ if not IsHeaderExtension(fileinfo_h.Extension().lstrip('.')):
return (False, '')
filename_cc = filename_cc[:-(len(fileinfo_cc.Extension()))]
@@ -5720,7 +5769,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
required = {} # A map of header name to linenumber and the template entity.
# Example of required: { '<functional>': (1219, 'less<>') }
- for linenum in range(clean_lines.NumLines()):
+ for linenum in xrange(clean_lines.NumLines()):
line = clean_lines.elided[linenum]
if not line or line[0] == '#':
continue
@@ -6126,10 +6175,10 @@ def ProcessFileData(filename, file_extension, lines, error,
RemoveMultiLineComments(filename, lines, error)
clean_lines = CleansedLines(lines)
- if file_extension in GetHeaderExtensions():
+ if IsHeaderExtension(file_extension):
CheckForHeaderGuard(filename, clean_lines, error)
- for line in range(clean_lines.NumLines()):
+ for line in xrange(clean_lines.NumLines()):
ProcessLine(filename, file_extension, clean_lines, line,
include_state, function_state, nesting_state, error,
extra_check_functions)
@@ -6148,8 +6197,6 @@ def ProcessFileData(filename, file_extension, lines, error,
CheckForNewlineAtEOF(filename, lines, error)
- CheckInlineHeader(filename, include_state, error)
-
def ProcessConfigOverrides(filename):
""" Loads the configuration files and processes the config overrides.
@@ -6197,37 +6244,35 @@ def ProcessConfigOverrides(filename):
if base_name:
pattern = re.compile(val)
if pattern.match(base_name):
- _cpplint_state.PrintInfo('Ignoring "%s": file excluded by '
- '"%s". File path component "%s" matches pattern "%s"\n' %
- (filename, cfg_file, base_name, val))
+ if _cpplint_state.quiet:
+ # Suppress "Ignoring file" warning when using --quiet.
+ return False
+ _cpplint_state.PrintInfo('Ignoring "%s": file excluded by "%s". '
+ 'File path component "%s" matches '
+ 'pattern "%s"\n' %
+ (filename, cfg_file, base_name, val))
return False
elif name == 'linelength':
global _line_length
try:
- _line_length = int(val)
+ _line_length = int(val)
except ValueError:
- _cpplint_state.PrintError('Line length must be numeric.')
+ _cpplint_state.PrintError('Line length must be numeric.')
elif name == 'extensions':
- global _valid_extensions
- try:
- extensions = [ext.strip() for ext in val.split(',')]
- _valid_extensions = set(extensions)
- except ValueError:
- sys.stderr.write('Extensions should be a comma-separated list of values;'
- 'for example: extensions=hpp,cpp\n'
- 'This could not be parsed: "%s"' % (val,))
- elif name == 'headers':
- global _header_extensions
- try:
- extensions = [ext.strip() for ext in val.split(',')]
- _header_extensions = set(extensions)
- except ValueError:
- sys.stderr.write('Extensions should be a comma-separated list of values;'
- 'for example: extensions=hpp,cpp\n'
- 'This could not be parsed: "%s"' % (val,))
+ global _valid_extensions
+ try:
+ extensions = [ext.strip() for ext in val.split(',')]
+ _valid_extensions = set(extensions)
+ except ValueError:
+ sys.stderr.write('Extensions should be a comma-separated list of values;'
+ 'for example: extensions=hpp,cpp\n'
+ 'This could not be parsed: "%s"' % (val,))
elif name == 'root':
global _root
- _root = val
+ # root directories are specified relative to CPPLINT.cfg dir.
+ _root = os.path.join(os.path.dirname(cfg_file), val)
+ elif name == 'headers':
+ ProcessHppHeadersOption(val)
else:
_cpplint_state.PrintError(
'Invalid configuration option (%s) in file %s\n' %
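A hedged example of a CPPLINT.cfg exercising the options handled above (values are illustrative); note that after this change the root value is resolved relative to the directory containing the cfg file:

    set noparent
    filter=-build/include_alpha,+build/include_order
    exclude_files=.*_test\.cc
    linelength=100
    extensions=c,cc,h
    headers=h,hpp
    root=..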
@@ -6241,7 +6286,7 @@ def ProcessConfigOverrides(filename):
# Apply all the accumulated filters in reverse order (top-level directory
# config options having the least priority).
for cfg_filter in reversed(cfg_filters):
- _AddFilters(cfg_filter)
+ _AddFilters(cfg_filter)
return True
@@ -6262,6 +6307,7 @@ def ProcessFile(filename, vlevel, extra_check_functions=None):
_SetVerboseLevel(vlevel)
_BackupFilters()
+ old_errors = _cpplint_state.error_count
if not ProcessConfigOverrides(filename):
_RestoreFilters()
@@ -6330,7 +6376,10 @@ def ProcessFile(filename, vlevel, extra_check_functions=None):
Error(filename, linenum, 'whitespace/newline', 1,
'Unexpected \\r (^M) found; better to use only \\n')
- _cpplint_state.PrintInfo('Done processing %s\n' % filename)
+ # Suppress printing anything if --quiet was passed unless the error
+ # count has increased after processing this file.
+ if not _cpplint_state.quiet or old_errors != _cpplint_state.error_count:
+ _cpplint_state.PrintInfo('Done processing %s\n' % filename)
_RestoreFilters()
@@ -6340,13 +6389,21 @@ def PrintUsage(message):
Args:
message: The optional error message.
"""
- sys.stderr.write(_USAGE)
+ sys.stderr.write(_USAGE % (list(GetAllExtensions()),
+ ','.join(list(GetAllExtensions())),
+ GetHeaderExtensions(),
+ ','.join(GetHeaderExtensions())))
if message:
sys.exit('\nFATAL ERROR: ' + message)
else:
sys.exit(0)
+def PrintVersion():
+ sys.stdout.write('Cpplint fork (https://github.com/cpplint/cpplint)\n')
+ sys.stdout.write('cpplint ' + __VERSION__ + '\n')
+ sys.stdout.write('Python ' + sys.version + '\n')
+ sys.exit(0)
def PrintCategories():
"""Prints a list of all the error-categories used by error messages.
@@ -6370,6 +6427,8 @@ def ParseArguments(args):
"""
try:
(opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=',
+ 'v=',
+ 'version',
'counting=',
'filter=',
'root=',
@@ -6377,27 +6436,32 @@ def ParseArguments(args):
'linelength=',
'extensions=',
'exclude=',
+ 'recursive',
'headers=',
- 'quiet',
- 'recursive'])
+ 'quiet'])
except getopt.GetoptError:
PrintUsage('Invalid arguments.')
verbosity = _VerboseLevel()
output_format = _OutputFormat()
filters = ''
+ quiet = _Quiet()
counting_style = ''
recursive = False
for (opt, val) in opts:
if opt == '--help':
PrintUsage(None)
+ if opt == '--version':
+ PrintVersion()
elif opt == '--output':
if val not in ('emacs', 'vs7', 'eclipse', 'junit'):
PrintUsage('The only allowed output formats are emacs, vs7, eclipse '
'and junit.')
output_format = val
- elif opt == '--verbose':
+ elif opt == '--quiet':
+ quiet = True
+ elif opt == '--verbose' or opt == '--v':
verbosity = int(val)
elif opt == '--filter':
filters = val
@@ -6429,18 +6493,11 @@ def ParseArguments(args):
try:
_valid_extensions = set(val.split(','))
except ValueError:
- PrintUsage('Extensions must be comma seperated list.')
- elif opt == '--headers':
- global _header_extensions
- try:
- _header_extensions = set(val.split(','))
- except ValueError:
PrintUsage('Extensions must be comma seperated list.')
+ elif opt == '--headers':
+ ProcessHppHeadersOption(val)
elif opt == '--recursive':
recursive = True
- elif opt == '--quiet':
- global _quiet
- _quiet = True
if not filenames:
PrintUsage('No files were specified.')
@@ -6452,6 +6509,7 @@ def ParseArguments(args):
filenames = _FilterExcludedFiles(filenames)
_SetOutputFormat(output_format)
+ _SetQuiet(quiet)
_SetVerboseLevel(verbosity)
_SetFilters(filters)
_SetCountingStyle(counting_style)
@@ -6472,21 +6530,21 @@ def _ExpandDirectories(filenames):
"""
expanded = set()
for filename in filenames:
- if not os.path.isdir(filename):
- expanded.add(filename)
- continue
+ if not os.path.isdir(filename):
+ expanded.add(filename)
+ continue
- for root, _, files in os.walk(filename):
- for loopfile in files:
- fullname = os.path.join(root, loopfile)
- if fullname.startswith('.' + os.path.sep):
- fullname = fullname[len('.' + os.path.sep):]
- expanded.add(fullname)
+ for root, _, files in os.walk(filename):
+ for loopfile in files:
+ fullname = os.path.join(root, loopfile)
+ if fullname.startswith('.' + os.path.sep):
+ fullname = fullname[len('.' + os.path.sep):]
+ expanded.add(fullname)
filtered = []
for filename in expanded:
- if os.path.splitext(filename)[1][1:] in GetAllExtensions():
- filtered.append(filename)
+ if os.path.splitext(filename)[1][1:] in GetAllExtensions():
+ filtered.append(filename)
return filtered
@@ -6508,7 +6566,9 @@ def main():
_cpplint_state.ResetErrorCounts()
for filename in filenames:
ProcessFile(filename, _cpplint_state.verbose_level)
- _cpplint_state.PrintErrorCounts()
+ # If --quiet is passed, suppress printing error count unless there are errors.
+ if not _cpplint_state.quiet or _cpplint_state.error_count > 0:
+ _cpplint_state.PrintErrorCounts()
if _cpplint_state.output_format == 'junit':
sys.stderr.write(_cpplint_state.FormatJUnitXML())