Skip to content

Commit

Permalink
Detection status
Browse files Browse the repository at this point in the history
Store detection status for the reports in the database.
  • Loading branch information
bruntib committed Aug 18, 2017
1 parent 65af932 commit a112911
Show file tree
Hide file tree
Showing 13 changed files with 596 additions and 289 deletions.
29 changes: 17 additions & 12 deletions api/report_server.thrift
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ struct RunData{
6: string runCmd, // the used check command
7: optional bool can_delete // true if codeCheckerDBAccess::removeRunResults()
// is allowed on this run (see issue 151)
8: map<string, i32> detectionStatusCount
// this maps the detection status to its count
}
typedef list<RunData> RunDataList

Expand All @@ -43,16 +45,17 @@ struct ReviewData{

//-----------------------------------------------------------------------------
struct ReportData{
  1: string checkerId,        // the qualified id of the checker that reported this
  2: string bugHash,          // This is unique id of the concrete report.
  3: string checkedFile,      // this is a filepath
  4: string checkerMsg,       // description of the bug report
  5: i64 reportId,            // id of the report in the current run in the db
  6: i64 fileId,              // unique id of the file the report refers to
  7: shared.BugPathEvent lastBugPosition // This contains the range and message of the last item in the symbolic
                              // execution step list.
  8: shared.Severity severity // checker severity
  9: ReviewData review        // bug review status information.
  10: string detectionStatus  // 'new', 'resolved', 'unresolved', 'reopened'
}
typedef list<ReportData> ReportDataList

Expand Down Expand Up @@ -347,13 +350,15 @@ service codeCheckerDBAccess {
// * If none of them matches a (new file_id, true) is returned.
NeedFileResult needFileContent(
1: string filepath,
2: string content_hash)
2: string content_hash,
3: i64 run_id)
throws (1: shared.RequestFailed requestError),

bool addFileContent(
1: string content_hash,
2: string file_content,
3: optional Encoding encoding)
3: optional Encoding encoding,
4: i64 run_id)
throws (1: shared.RequestFailed requestError),

bool finishCheckerRun(1: i64 run_id)
Expand Down
74 changes: 74 additions & 0 deletions db_migrate/versions/41c3d07202db_detection_status.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
"""detection_status
Revision ID: 41c3d07202db
Revises: 21a4ca1179da
Create Date: 2017-07-23 17:15:23.289229
"""

# revision identifiers, used by Alembic.
revision = '41c3d07202db'
down_revision = '21a4ca1179da'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa


def upgrade():
    """Apply the detection-status schema changes.

    - bug_path_events / bug_report_points: replace the prev/next
      linked-list columns with a direct (report_id, order) reference to
      the owning report.
    - files: detach file rows from runs (run_id and inc_count removed),
      so file content is shared across runs.
    - reports: add the new ``detection_status`` column and drop the
      start/end bug event/point bookkeeping columns and their
      indexes/foreign keys.
    - runs: drop the now-unused inc_count column.

    NOTE: statement order matters — indexes and foreign keys are dropped
    before the columns they reference.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('bug_path_events', sa.Column('order', sa.Integer(), nullable=True))
    op.add_column('bug_path_events', sa.Column('report_id', sa.Integer(), nullable=True))
    op.create_index(op.f('ix_bug_path_events_report_id'), 'bug_path_events', ['report_id'], unique=False)
    # DEFERRED so report/event rows may be inserted in either order within
    # a transaction; CASCADE removes events together with their report.
    op.create_foreign_key(op.f('fk_bug_path_events_report_id_reports'), 'bug_path_events', 'reports', ['report_id'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True)
    op.drop_column('bug_path_events', 'prev')
    op.drop_column('bug_path_events', 'next')
    op.add_column('bug_report_points', sa.Column('order', sa.Integer(), nullable=True))
    op.add_column('bug_report_points', sa.Column('report_id', sa.Integer(), nullable=True))
    op.create_index(op.f('ix_bug_report_points_report_id'), 'bug_report_points', ['report_id'], unique=False)
    op.create_foreign_key(op.f('fk_bug_report_points_report_id_reports'), 'bug_report_points', 'reports', ['report_id'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True)
    op.drop_column('bug_report_points', 'next')
    op.drop_constraint(u'fk_files_run_id_runs', 'files', type_='foreignkey')
    op.drop_column('files', 'inc_count')
    op.drop_column('files', 'run_id')
    # Nullable: existing report rows have no detection status yet.
    op.add_column('reports', sa.Column('detection_status', sa.String(), nullable=True))
    op.drop_index('ix_reports_end_bugevent', table_name='reports')
    op.drop_index('ix_reports_start_bugevent', table_name='reports')
    op.drop_constraint(u'fk_reports_start_bugpoint_bug_report_points', 'reports', type_='foreignkey')
    op.drop_constraint(u'fk_reports_start_bugevent_bug_path_events', 'reports', type_='foreignkey')
    op.drop_constraint(u'fk_reports_end_bugevent_bug_path_events', 'reports', type_='foreignkey')
    op.drop_column('reports', 'end_bugevent')
    op.drop_column('reports', 'start_bugpoint')
    op.drop_column('reports', 'start_bugevent')
    op.drop_column('runs', 'inc_count')
    ### end Alembic commands ###


def downgrade():
    """Revert the detection-status schema changes (mirror of upgrade()).

    Restores the dropped columns, indexes and foreign keys, and removes
    ``reports.detection_status``. Column data deleted by upgrade() is NOT
    recovered — the restored columns come back empty (nullable).

    NOTE: statement order matters — columns are re-added before the
    indexes/foreign keys that reference them.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('runs', sa.Column('inc_count', sa.INTEGER(), autoincrement=False, nullable=True))
    op.add_column('reports', sa.Column('start_bugevent', sa.INTEGER(), autoincrement=False, nullable=True))
    op.add_column('reports', sa.Column('start_bugpoint', sa.INTEGER(), autoincrement=False, nullable=True))
    op.add_column('reports', sa.Column('end_bugevent', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_foreign_key(u'fk_reports_end_bugevent_bug_path_events', 'reports', 'bug_path_events', ['end_bugevent'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True)
    op.create_foreign_key(u'fk_reports_start_bugevent_bug_path_events', 'reports', 'bug_path_events', ['start_bugevent'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True)
    op.create_foreign_key(u'fk_reports_start_bugpoint_bug_report_points', 'reports', 'bug_report_points', ['start_bugpoint'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True)
    op.create_index('ix_reports_start_bugevent', 'reports', ['start_bugevent'], unique=False)
    op.create_index('ix_reports_end_bugevent', 'reports', ['end_bugevent'], unique=False)
    op.drop_column('reports', 'detection_status')
    # Re-attach files to runs (pre-migration layout).
    op.add_column('files', sa.Column('run_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.add_column('files', sa.Column('inc_count', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_foreign_key(u'fk_files_run_id_runs', 'files', 'runs', ['run_id'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True)
    # Restore the prev/next linked-list ordering columns.
    op.add_column('bug_report_points', sa.Column('next', sa.INTEGER(), autoincrement=False, nullable=True))
    op.drop_constraint(op.f('fk_bug_report_points_report_id_reports'), 'bug_report_points', type_='foreignkey')
    op.drop_index(op.f('ix_bug_report_points_report_id'), table_name='bug_report_points')
    op.drop_column('bug_report_points', 'report_id')
    op.drop_column('bug_report_points', 'order')
    op.add_column('bug_path_events', sa.Column('next', sa.INTEGER(), autoincrement=False, nullable=True))
    op.add_column('bug_path_events', sa.Column('prev', sa.INTEGER(), autoincrement=False, nullable=True))
    op.drop_constraint(op.f('fk_bug_path_events_report_id_reports'), 'bug_path_events', type_='foreignkey')
    op.drop_index(op.f('ix_bug_path_events_report_id'), table_name='bug_path_events')
    op.drop_column('bug_path_events', 'report_id')
    op.drop_column('bug_path_events', 'order')
    ### end Alembic commands ###
25 changes: 20 additions & 5 deletions libcodechecker/analyze/analyzers/result_handler_plist_to_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,19 +53,35 @@ def __store_bugs(self, files, reports, client):
hasher = sha256()
hasher.update(source)
content_hash = hasher.hexdigest()
file_descriptor = client.needFileContent(file_name, content_hash)
file_descriptor = client.needFileContent(file_name,
content_hash,
self.__run_id)
file_ids[file_name] = file_descriptor.fileId

if file_descriptor.needed:
source64 = base64.b64encode(source)
res = client.addFileContent(content_hash,
source64,
Encoding.BASE64)
Encoding.BASE64,
self.__run_id)
if not res:
LOG.debug("Failed to store file content")
with codecs.open(file_name, 'r', 'UTF-8') as source_file:
file_content = source_file.read()
# WARN the right content encoding is needed for thrift!
source = codecs.encode(file_content, 'utf-8')
# TODO: we may not use the file content in the end
# depending on skippaths.

source64 = base64.b64encode(source)
res = client.addFileContent(file_descriptor.fileId,
source64,
Encoding.BASE64,
self.__run_id)
if not res:
LOG.debug("Failed to store file content")

# Skipping reports in header files handled here.
report_ids = []
for report in reports:
events = [i for i in report.bug_path if i.get('kind') == 'event']

Expand Down Expand Up @@ -174,7 +190,6 @@ def __store_bugs(self, files, reports, client):
severity)

LOG.debug("Storing done for report " + str(report_id))
report_ids.append(report_id)

# Check for suppress comment.
supp = sp_handler.get_suppressed()
Expand All @@ -183,7 +198,7 @@ def __store_bugs(self, files, reports, client):
status = shared.ttypes.ReviewStatus.UNREVIEWED
client.changeReviewStatus(report_id, status, comment)

def handle_results(self, client=None):
def handle_results(self, client):
"""
Send the plist content to the database.
Server API calls should be used in one connection.
Expand Down
8 changes: 5 additions & 3 deletions libcodechecker/database_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,9 +147,11 @@ def create_engine(connection_string):

if make_url(connection_string).drivername == 'sqlite+pysqlite':
# FIXME: workaround for locking errors
return sqlalchemy.create_engine(connection_string,
encoding='utf8',
connect_args={'timeout': 600})
return sqlalchemy.create_engine(
connection_string,
encoding='utf8',
connect_args={'timeout': 600,
'check_same_thread': False})
else:
return sqlalchemy.create_engine(connection_string,
encoding='utf8')
Expand Down
4 changes: 2 additions & 2 deletions libcodechecker/libclient/thrift_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,11 +155,11 @@ def addReport(self, run_id, file_id, bug_hash, checker_message, bugpath,
pass

@ThriftClientCall
def needFileContent(self, filepath, content_hash, run_id):
    """Ask the server whether the content for *filepath*, identified by
    *content_hash*, still needs to be uploaded for *run_id*.

    Stub only: the @ThriftClientCall decorator performs the actual RPC.
    """
    pass

@ThriftClientCall
def addFileContent(self, content_hash, content, encoding, run_id):
    """Upload the (possibly encoded) file *content* stored under
    *content_hash* for *run_id*.

    Stub only: the @ThriftClientCall decorator performs the actual RPC.
    """
    pass

@ThriftClientCall
Expand Down
9 changes: 2 additions & 7 deletions libcodechecker/libhandlers/store.py
Original file line number Diff line number Diff line change
Expand Up @@ -415,19 +415,14 @@ def main(args):
1,
callback=lambda results: res_handler(results)
).get(float('inf'))

pool.close()
except Exception:
pool.terminate()
raise # CodeChecker.py is the invoker, it will handle this.
finally:
pool.join()
os.chdir(original_cwd)

client.finishCheckerRun(context.run_id)

if len(check_durations) > 0:
client.setRunDuration(context.run_id,
# Round the duration to seconds.
int(sum(check_durations)))
return

client.finishCheckerRun(context.run_id)
10 changes: 7 additions & 3 deletions libcodechecker/orm_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,6 @@ class Run(Base):
name = Column(String)
version = Column(String)
command = Column(String)
inc_count = Column(Integer)
can_delete = Column(Boolean, nullable=False, server_default=true(),
default=True)

Expand All @@ -65,7 +64,6 @@ def __init__(self, name, version, command):
self.date, self.name, self.version, self.command = \
datetime.now(), name, version, command
self.duration = -1
self.inc_count = 0

def mark_finished(self):
self.duration = ceil((datetime.now() - self.date).total_seconds())
Expand Down Expand Up @@ -191,6 +189,11 @@ class Report(Base):

# TODO: multiple messages to multiple source locations?
checker_message = Column(String)
detection_status = Column(String)
# detection_status = Column(Enum('new',
# 'unresolved',
# 'resolved',
# 'reopened'))

# Cascade delete might remove rows SQLAlchemy warns about this
# to remove warnings about already deleted items set this to False.
Expand All @@ -200,7 +203,7 @@ class Report(Base):

# Priority/severity etc...
def __init__(self, run_id, bug_id, file_id, checker_message, checker_id,
checker_cat, bug_type, severity):
checker_cat, bug_type, severity, detection_status):
self.run_id = run_id
self.file_id = file_id
self.bug_id = bug_id
Expand All @@ -209,6 +212,7 @@ def __init__(self, run_id, bug_id, file_id, checker_message, checker_id,
self.checker_id = checker_id
self.checker_cat = checker_cat
self.bug_type = bug_type
self.detection_status = detection_status


class SkipPath(Base):
Expand Down
Loading

0 comments on commit a112911

Please sign in to comment.