Skip to content

Commit

Permalink
[analyzer] Allow --file and skipfile option to be given together
Browse files Browse the repository at this point in the history
The CodeChecker VSCodePlugin uses the `--file` parameter to analyze single files.
Large projects load their configuration using the `--config` parameter, and
if the config contains a `-i skipfile` option, the `CodeChecker analyze` call
raises an error.

This patch allows `-i skipfile` and `--file` to be given together.
  • Loading branch information
csordasmarton committed Mar 7, 2022
1 parent 0fd8a2e commit c1fd053
Show file tree
Hide file tree
Showing 16 changed files with 292 additions and 143 deletions.
34 changes: 17 additions & 17 deletions analyzer/codechecker_analyzer/analysis_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ def is_ctu_active(source_analyzer):


def prepare_check(action, analyzer_config, output_dir, checker_labels,
skip_handler, statistics_data, disable_ctu=False):
skip_handlers, statistics_data, disable_ctu=False):
""" Construct the source analyzer and result handler. """
# Create a source analyzer.
source_analyzer = \
Expand Down Expand Up @@ -209,7 +209,7 @@ def prepare_check(action, analyzer_config, output_dir, checker_labels,
rh = source_analyzer.construct_result_handler(action,
output_dir,
checker_labels,
skip_handler)
skip_handlers)

# NOTICE!
# The currently analyzed source file needs to be set before the
Expand All @@ -220,7 +220,7 @@ def prepare_check(action, analyzer_config, output_dir, checker_labels,
return source_analyzer, rh


def handle_success(rh, result_file, result_base, skip_handler,
def handle_success(rh, result_file, result_base, skip_handlers,
capture_analysis_output, success_dir):
"""
Result postprocessing is required if the analysis was
Expand All @@ -232,7 +232,7 @@ def handle_success(rh, result_file, result_base, skip_handler,
save_output(os.path.join(success_dir, result_base),
rh.analyzer_stdout, rh.analyzer_stderr)

rh.postprocess_result(skip_handler)
rh.postprocess_result(skip_handlers)

# Generated reports will be handled separately at store.

Expand Down Expand Up @@ -319,7 +319,7 @@ def handle_reproducer(source_analyzer, rh, zip_file, actions_map):


def handle_failure(
source_analyzer, rh, zip_file, result_base, actions_map, skip_handler
source_analyzer, rh, zip_file, result_base, actions_map, skip_handlers
):
"""
If the analysis fails a debug zip is packed together which contains
Expand All @@ -334,7 +334,7 @@ def handle_failure(
checks = source_analyzer.config_handler.checks()
state = checks.get('clang-diagnostic-error', (CheckerState.default, ''))[0]
if state != CheckerState.disabled:
rh.postprocess_result(skip_handler)
rh.postprocess_result(skip_handlers)

# Remove files that successfully analyzed earlier on.
plist_file = result_base + ".plist"
Expand Down Expand Up @@ -488,7 +488,7 @@ def check(check_data):
skiplist handler is None if no skip file was configured.
"""
actions_map, action, context, analyzer_config, \
output_dir, skip_handler, quiet_output_on_stdout, \
output_dir, skip_handlers, quiet_output_on_stdout, \
capture_analysis_output, generate_reproducer, analysis_timeout, \
analyzer_environment, ctu_reanalyze_on_failure, \
output_dirs, statistics_data = check_data
Expand All @@ -509,7 +509,7 @@ def check(check_data):

source_analyzer, rh = prepare_check(action, analyzer_config,
output_dir, context.checker_labels,
skip_handler, statistics_data)
skip_handlers, statistics_data)

reanalyzed = os.path.exists(rh.analyzer_result_file)

Expand Down Expand Up @@ -605,12 +605,12 @@ def handle_analysis_result(success, zip_file=zip_file):

if success:
handle_success(rh, result_file, result_base,
skip_handler, capture_analysis_output,
skip_handlers, capture_analysis_output,
success_dir)
elif not generate_reproducer:
handle_failure(source_analyzer, rh,
os.path.join(failed_dir, zip_file),
result_base, actions_map, skip_handler)
result_base, actions_map, skip_handlers)

if rh.analyzer_returncode == 0:
handle_analysis_result(success=True)
Expand Down Expand Up @@ -639,7 +639,7 @@ def handle_analysis_result(success, zip_file=zip_file):
source_analyzer, rh = \
prepare_check(action, analyzer_config,
output_dir, context.checker_labels,
skip_handler, statistics_data,
skip_handlers, statistics_data,
True)
reanalyzed = os.path.exists(rh.analyzer_result_file)

Expand Down Expand Up @@ -698,20 +698,20 @@ def handle_analysis_result(success, zip_file=zip_file):
action.source


def skip_cpp(compile_actions, skip_handler):
def skip_cpp(compile_actions, skip_handlers):
"""If there is no skiplist handler there was no skip list file in
the command line.
C++ file skipping is handled here.
"""

if not skip_handler:
if not skip_handlers:
return compile_actions, []

analyze = []
skip = []
for compile_action in compile_actions:

if skip_handler and skip_handler.should_skip(compile_action.source):
if skip_handlers and skip_handlers.should_skip(compile_action.source):
skip.append(compile_action)
else:
analyze.append(compile_action)
Expand All @@ -720,7 +720,7 @@ def skip_cpp(compile_actions, skip_handler):


def start_workers(actions_map, actions, context, analyzer_config_map,
jobs, output_path, skip_handler, metadata_tool,
jobs, output_path, skip_handlers, metadata_tool,
quiet_analyze, capture_analysis_output, generate_reproducer,
timeout, ctu_reanalyze_on_failure, statistics_data, manager,
compile_cmd_count):
Expand All @@ -738,7 +738,7 @@ def signal_handler(signum, frame):
sys.exit(128 + signum)

signal.signal(signal.SIGINT, signal_handler)
actions, skipped_actions = skip_cpp(actions, skip_handler)
actions, skipped_actions = skip_cpp(actions, skip_handlers)
# Start checking parallel.
checked_var = multiprocessing.Value('i', 1)
actions_num = multiprocessing.Value('i', len(actions))
Expand Down Expand Up @@ -781,7 +781,7 @@ def signal_handler(signum, frame):
context,
analyzer_config_map.get(build_action.analyzer_type),
output_path,
skip_handler,
skip_handlers,
quiet_analyze,
capture_analysis_output,
generate_reproducer,
Expand Down
12 changes: 6 additions & 6 deletions analyzer/codechecker_analyzer/analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ def __get_ctu_data(config_map, ctu_dir):
'ctu_temp_fnmap_folder': 'tmpExternalFnMaps'}


def perform_analysis(args, skip_handler, context, actions, metadata_tool,
def perform_analysis(args, skip_handlers, context, actions, metadata_tool,
compile_cmd_count):
"""
Perform static analysis via the given (or if not, all) analyzers,
Expand Down Expand Up @@ -280,7 +280,7 @@ def perform_analysis(args, skip_handler, context, actions, metadata_tool,
ctu_data = __get_ctu_data(config_map, ctu_dir)

makefile_creator = MakeFileCreator(analyzers, args.output_path,
config_map, context, skip_handler,
config_map, context, skip_handlers,
ctu_collect, statistics_data,
ctu_data)
makefile_creator.create(actions)
Expand Down Expand Up @@ -314,13 +314,13 @@ def perform_analysis(args, skip_handler, context, actions, metadata_tool,

pre_analyze = [a for a in actions
if a.analyzer_type == ClangSA.ANALYZER_NAME]
pre_anal_skip_handler = None
pre_anal_skip_handlers = None

# Skip list is applied only in pre-analysis
# if --ctu-collect or --stats-collect was called explicitly
if ((ctu_collect and not ctu_analyze)
or ("stats_output" in args and args.stats_output)):
pre_anal_skip_handler = skip_handler
pre_anal_skip_handlers = skip_handlers

clangsa_config = config_map.get(ClangSA.ANALYZER_NAME)

Expand All @@ -329,7 +329,7 @@ def perform_analysis(args, skip_handler, context, actions, metadata_tool,
context,
clangsa_config,
args.jobs,
pre_anal_skip_handler,
pre_anal_skip_handlers,
ctu_data,
statistics_data,
manager)
Expand All @@ -349,7 +349,7 @@ def perform_analysis(args, skip_handler, context, actions, metadata_tool,
analysis_manager.start_workers(actions_map, actions, context,
config_map, args.jobs,
args.output_path,
skip_handler,
skip_handlers,
metadata_tool,
'quiet' in args,
'capture_analysis_output' in args,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from codechecker_report_converter.report import report_file
from codechecker_report_converter.report.hash import get_report_hash, HashType
from codechecker_common.logger import get_logger
from codechecker_common.skiplist_handler import SkipListHandler
from codechecker_common.skiplist_handler import SkipListHandlers

from ..result_handler_base import ResultHandler

Expand All @@ -34,7 +34,7 @@ def __init__(self, *args, **kwargs):

super(ClangSAResultHandler, self).__init__(*args, **kwargs)

def postprocess_result(self, skip_handler: Optional[SkipListHandler]):
def postprocess_result(self, skip_handlers: Optional[SkipListHandlers]):
"""
Generate analyzer result output file which can be parsed and stored
into the database.
Expand All @@ -43,7 +43,7 @@ def postprocess_result(self, skip_handler: Optional[SkipListHandler]):
reports = report_file.get_reports(
self.analyzer_result_file, self.checker_labels,
source_dir_path=self.source_dir_path)
reports = [r for r in reports if not r.skip(skip_handler)]
reports = [r for r in reports if not r.skip(skip_handlers)]

hash_type = None
if self.report_hash_type in ['context-free', 'context-free-v2']:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from codechecker_report_converter.report.hash import get_report_hash, HashType

from codechecker_common.logger import get_logger
from codechecker_common.skiplist_handler import SkipListHandler
from codechecker_common.skiplist_handler import SkipListHandlers

from ..result_handler_base import ResultHandler

Expand All @@ -36,7 +36,7 @@ def __init__(self, *args, **kwargs):

super(ClangTidyResultHandler, self).__init__(*args, **kwargs)

def postprocess_result(self, skip_handler: Optional[SkipListHandler]):
def postprocess_result(self, skip_handlers: Optional[SkipListHandlers]):
"""
Generate analyzer result output file which can be parsed and stored
into the database.
Expand All @@ -45,7 +45,7 @@ def postprocess_result(self, skip_handler: Optional[SkipListHandler]):
tidy_stdout = self.analyzer_stdout.splitlines()

reports = Parser().get_reports_from_iter(tidy_stdout)
reports = [r for r in reports if not r.skip(skip_handler)]
reports = [r for r in reports if not r.skip(skip_handlers)]

# In the earlier versions of CodeChecker Clang Tidy never used context
# free hash even if we enabled it with '--report-hash context-free'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
from typing import Optional

from codechecker_common.logger import get_logger
from codechecker_common.skiplist_handler import SkipListHandler
from codechecker_common.skiplist_handler import SkipListHandlers


LOG = get_logger('analyzer')
Expand Down Expand Up @@ -176,7 +176,7 @@ def clean_results(self):
# There might be no result file if analysis failed.
LOG.debug(oserr)

def postprocess_result(self, skip_handler: Optional[SkipListHandler]):
def postprocess_result(self, skip_handlers: Optional[SkipListHandlers]):
"""
Postprocess result if needed.
Should be called after the analyses finished.
Expand Down
19 changes: 10 additions & 9 deletions analyzer/codechecker_analyzer/buildlog/log_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -1193,8 +1193,8 @@ def parse_unique_log(compilation_database,
compiler_info_file=None,
keep_gcc_include_fixed=False,
keep_gcc_intrin=False,
analysis_skip_handler=None,
pre_analysis_skip_handler=None,
analysis_skip_handlers=None,
pre_analysis_skip_handlers=None,
ctu_or_stats_enabled=False,
env=None,
analyzer_clang_version=None):
Expand Down Expand Up @@ -1240,10 +1240,10 @@ def parse_unique_log(compilation_database,
pre analysis step nothing should be skipped to collect the required
information for the analysis step where not all the files are analyzed.
analysis_skip_handler -- skip handler for files which should be skipped
analysis_skip_handlers -- skip handlers for files which should be skipped
during analysis
pre_analysis_skip_handler -- skip handler for files which should be skipped
during pre analysis
pre_analysis_skip_handlers -- skip handlers for files which should be
skipped during pre analysis
ctu_or_stats_enabled -- ctu or statistics based analysis was enabled
influences the behavior which files are skipped.
env -- Is the environment where a subprocess call should be executed.
Expand Down Expand Up @@ -1274,10 +1274,11 @@ def parse_unique_log(compilation_database,
# at both analysis phases (pre analysis and analysis).
# Skipping of the compile commands is done differently if no
# CTU or statistics related feature was enabled.
if analysis_skip_handler \
and analysis_skip_handler.should_skip(entry['file']) \
and (not ctu_or_stats_enabled or pre_analysis_skip_handler
and pre_analysis_skip_handler.should_skip(entry['file'])):
if analysis_skip_handlers \
and analysis_skip_handlers.should_skip(entry['file']) \
and (not ctu_or_stats_enabled or pre_analysis_skip_handlers
and pre_analysis_skip_handlers.should_skip(
entry['file'])):
skipped_cmp_cmd_count += 1
continue

Expand Down
Loading

0 comments on commit c1fd053

Please sign in to comment.