linux-next/scripts/clang-tools/gen_compile_commands.py
Nathan Huckleberry 6ad7cbc015 Makefile: Add clang-tidy and static analyzer support to makefile
This patch adds clang-tidy and the clang static-analyzer as make
targets. The goal of this patch is to make static analysis tools
usable and extendable by any developer or researcher who is familiar
with basic C++.

The current static analysis tools require intimate knowledge of the
internal workings of the static analysis. Clang-tidy and the clang
static analyzer expose an easy-to-use API and allow users unfamiliar
with clang to write new checks with relative ease.

===Clang-tidy===

Clang-tidy is an easily extendable 'linter' that runs on the AST.
Clang-tidy checks are easy to write and understand. A check consists of
two parts: a matcher and a checker. The matcher is created using a
domain-specific language that acts on the AST
(https://clang.llvm.org/docs/LibASTMatchersReference.html). When AST
nodes are found by the matcher, a callback is made to the checker. The
checker can then execute additional checks and issue warnings.

Here is an example clang-tidy check to report functions that have calls
to local_irq_disable without calls to local_irq_enable and vice-versa.
Functions flagged with __attribute((annotation("ignore_irq_balancing")))
are ignored for analysis. (https://reviews.llvm.org/D65828)

===Clang static analyzer===

The clang static analyzer is a more powerful static analysis tool that
uses symbolic execution to find bugs. Currently there is a check that
looks for potential security bugs from invalid uses of kmalloc and
kfree. There are several more general-purpose checks that are useful
for the kernel.

The clang static analyzer is well documented and designed to be
extensible.
(https://clang-analyzer.llvm.org/checker_dev_manual.html)
(https://github.com/haoNoQ/clang-analyzer-guide/releases/download/v0.1/clang-analyzer-guide-v0.1.pdf)

The main draw of the clang tools is how accessible they are. The clang
documentation is very thorough, and these tools are built specifically
to be easily extended by any developer. They provide an accessible
method of bug-finding and research to people who are not deeply
familiar with the kernel codebase.

Signed-off-by: Nathan Huckleberry <nhuck@google.com>
Reviewed-by: Nick Desaulniers <ndesaulniers@google.com>
Tested-by: Nick Desaulniers <ndesaulniers@google.com>
Tested-by: Lukas Bulwahn <lukas.bulwahn@gmail.com>
Signed-off-by: Masahiro Yamada <masahiroy@kernel.org>
2020-08-27 00:44:33 +09:00


#!/usr/bin/env python
# SPDX-License-Identifier: GPL-2.0
#
# Copyright (C) Google LLC, 2018
#
# Author: Tom Roeder <tmroeder@google.com>
#
"""A tool for generating compile_commands.json in the Linux kernel."""
import argparse
import json
import logging
import os
import re
import subprocess
_DEFAULT_OUTPUT = 'compile_commands.json'
_DEFAULT_LOG_LEVEL = 'WARNING'
_FILENAME_PATTERN = r'^\..*\.cmd$'
_LINE_PATTERN = r'^cmd_[^ ]*\.o := (.* )([^ ]*\.c)$'
_VALID_LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
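# Example (hypothetical compiler flags and paths): a file such as
# kernel/.fork.o.cmd (matched by _FILENAME_PATTERN) starts with a line like
#   cmd_kernel/fork.o := gcc -Wall -c -o kernel/fork.o kernel/fork.c
# _LINE_PATTERN captures the command prefix ('gcc -Wall -c -o kernel/fork.o ')
# in group 1 and the source file ('kernel/fork.c') in group 2.
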
def parse_arguments():
    """Sets up and parses command-line arguments.

    Returns:
        log_level: A logging level to filter log output.
        directory: The work directory where the objects were built.
        output: Where to write the compile-commands JSON file.
        ar: Command used for parsing .a archives.
        paths: The list of files/directories to handle to find .cmd files.
    """
    usage = 'Creates a compile_commands.json database from kernel .cmd files'
    parser = argparse.ArgumentParser(description=usage)

    directory_help = ('specify the output directory used for the kernel build '
                      '(defaults to the working directory)')
    parser.add_argument('-d', '--directory', type=str, default='.',
                        help=directory_help)

    output_help = ('path to the output command database (defaults to ' +
                   _DEFAULT_OUTPUT + ')')
    parser.add_argument('-o', '--output', type=str, default=_DEFAULT_OUTPUT,
                        help=output_help)

    log_level_help = ('the level of log messages to produce (defaults to ' +
                      _DEFAULT_LOG_LEVEL + ')')
    parser.add_argument('--log_level', choices=_VALID_LOG_LEVELS,
                        default=_DEFAULT_LOG_LEVEL, help=log_level_help)

    ar_help = 'command used for parsing .a archives'
    parser.add_argument('-a', '--ar', type=str, default='llvm-ar', help=ar_help)

    paths_help = ('directories to search or files to parse '
                  '(files should be *.o, *.a, or modules.order). '
                  'If nothing is specified, the current directory is searched')
    parser.add_argument('paths', type=str, nargs='*', help=paths_help)

    args = parser.parse_args()

    return (args.log_level,
            os.path.abspath(args.directory),
            args.output,
            args.ar,
            args.paths if len(args.paths) > 0 else [args.directory])

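# For example (hypothetical invocation), running
#   gen_compile_commands.py -d /path/to/build vmlinux.a
# returns ('WARNING', '/path/to/build', 'compile_commands.json', 'llvm-ar',
# ['vmlinux.a']).
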
def cmdfiles_in_dir(directory):
    """Generate the iterator of .cmd files found under the directory.

    Walk under the given directory, and yield every .cmd file found.

    Args:
        directory: The directory to search for .cmd files.

    Yields:
        The path to a .cmd file.
    """
    filename_matcher = re.compile(_FILENAME_PATTERN)
    for dirpath, _, filenames in os.walk(directory):
        for filename in filenames:
            if filename_matcher.match(filename):
                yield os.path.join(dirpath, filename)

def to_cmdfile(path):
    """Return the path of the .cmd file used for the given build artifact.

    Args:
        path: file path

    Returns:
        The path to the .cmd file
    """
    dir, base = os.path.split(path)
    return os.path.join(dir, '.' + base + '.cmd')

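# For example (hypothetical path), to_cmdfile('kernel/fork.o') returns
# 'kernel/.fork.o.cmd'.
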
def cmdfiles_for_o(obj):
    """Generate the iterator of .cmd files associated with the object.

    Yield the .cmd file used to build the given object.

    Args:
        obj: The object path

    Yields:
        The path to the .cmd file
    """
    yield to_cmdfile(obj)

def cmdfiles_for_a(archive, ar):
    """Generate the iterator of .cmd files associated with the archive.

    Parse the given archive, and yield every .cmd file used to build it.

    Args:
        archive: The archive to parse
        ar: Command used for parsing .a archives

    Yields:
        The path to every .cmd file found
    """
    for obj in subprocess.check_output([ar, '-t', archive]).decode().split():
        yield to_cmdfile(obj)

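# 'ar -t' prints one archive member per line; for a hypothetical built-in.a
# containing kernel/fork.o, this yields 'kernel/.fork.o.cmd'.
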
def cmdfiles_for_modorder(modorder):
    """Generate the iterator of .cmd files associated with the modules.order.

    Parse the given modules.order, and yield every .cmd file used to build the
    contained modules.

    Args:
        modorder: The modules.order file to parse

    Yields:
        The path to every .cmd file found
    """
    with open(modorder) as f:
        for line in f:
            ko = line.rstrip()
            base, ext = os.path.splitext(ko)
            if ext != '.ko':
                sys.exit('{}: module path must end with .ko'.format(ko))
            mod = base + '.mod'
            # The first line of *.mod lists the objects that compose the module.
            with open(mod) as m:
                for obj in m.readline().split():
                    yield to_cmdfile(obj)

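# For example (hypothetical module), a modules.order line 'drivers/char/lp.ko'
# leads to drivers/char/lp.mod, whose first line lists the composing objects
# (e.g. 'drivers/char/lp.o'), each mapped to its .cmd file.
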
def process_line(root_directory, command_prefix, file_path):
    """Extracts information from a .cmd line and creates an entry from it.

    Args:
        root_directory: The directory that was searched for .cmd files. Usually
            used directly in the "directory" entry in compile_commands.json.
        command_prefix: The extracted command line, up to the last element.
        file_path: The .c file from the end of the extracted command.
            Usually relative to root_directory, but sometimes absolute.

    Returns:
        An entry to append to compile_commands.

    Raises:
        ValueError: Could not find the extracted file based on file_path and
            root_directory.
    """
    # The .cmd files are intended to be included directly by Make, so they
    # escape the pound sign '#', either as '\#' or '$(pound)' (depending on the
    # kernel version). The compile_commands.json file is not interpreted
    # by Make, so this code replaces the escaped version with '#'.
    prefix = command_prefix.replace('\#', '#').replace('$(pound)', '#')

    # Use os.path.abspath() to normalize the path resolving '.' and '..' .
    abs_path = os.path.abspath(os.path.join(root_directory, file_path))
    if not os.path.exists(abs_path):
        raise ValueError('File %s not found' % abs_path)
    return {
        'directory': root_directory,
        'file': abs_path,
        'command': prefix + file_path,
    }

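# The resulting compile_commands.json entry looks roughly like this
# (hypothetical paths and flags):
#   {
#       "command": "gcc -Wall -c -o kernel/fork.o kernel/fork.c",
#       "directory": "/path/to/build",
#       "file": "/path/to/build/kernel/fork.c"
#   }
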
def main():
    """Walks through the directory and finds and parses .cmd files."""
    log_level, directory, output, ar, paths = parse_arguments()

    level = getattr(logging, log_level)
    logging.basicConfig(format='%(levelname)s: %(message)s', level=level)

    line_matcher = re.compile(_LINE_PATTERN)

    compile_commands = []

    for path in paths:
        # If 'path' is a directory, handle all .cmd files under it.
        # Otherwise, handle .cmd files associated with the file.
        # Most built-in objects are linked via archives (built-in.a or lib.a),
        # but some objects are linked into vmlinux directly.
        # Modules are listed in modules.order.
        if os.path.isdir(path):
            cmdfiles = cmdfiles_in_dir(path)
        elif path.endswith('.o'):
            cmdfiles = cmdfiles_for_o(path)
        elif path.endswith('.a'):
            cmdfiles = cmdfiles_for_a(path, ar)
        elif path.endswith('modules.order'):
            cmdfiles = cmdfiles_for_modorder(path)
        else:
            sys.exit('{}: unknown file type'.format(path))

        for cmdfile in cmdfiles:
            with open(cmdfile, 'rt') as f:
                result = line_matcher.match(f.readline())
                if result:
                    try:
                        entry = process_line(directory, result.group(1),
                                             result.group(2))
                        compile_commands.append(entry)
                    except ValueError as err:
                        logging.info('Could not add line from %s: %s',
                                     cmdfile, err)

    with open(output, 'wt') as f:
        json.dump(compile_commands, f, indent=2, sort_keys=True)


if __name__ == '__main__':
    main()
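
# Usage sketch (hypothetical paths): after a kernel build in /path/to/build,
#   ./scripts/clang-tools/gen_compile_commands.py -d /path/to/build
# writes compile_commands.json there; clang tools that understand compilation
# databases (e.g. clang-tidy with '-p /path/to/build', or clangd) can then use
# it to analyze individual source files.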