From a112911e4f497fce2b0bcb9d342915f6f0d65100 Mon Sep 17 00:00:00 2001
From: bruntib
Date: Mon, 10 Jul 2017 19:16:28 +0200
Subject: [PATCH] Detection status

Store detection status for the reports in the database.
---
 api/report_server.thrift                       |  29 +-
 .../versions/41c3d07202db_detection_status.py  |  74 ++
 .../analyzers/result_handler_plist_to_db.py    |  25 +-
 libcodechecker/database_handler.py             |   8 +-
 libcodechecker/libclient/thrift_helper.py      |   4 +-
 libcodechecker/libhandlers/store.py            |   9 +-
 libcodechecker/orm_model.py                    |  10 +-
 .../server/client_db_access_handler.py         | 650 ++++++++++++------
 .../server/client_db_access_server.py          |  13 +-
 .../report_viewer_api/test_hash_clash.py       |  30 +-
 tests/functional/update/test_update_mode.py    |  12 +-
 www/scripts/codecheckerviewer/ListOfBugs.js    |   4 +-
 www/scripts/codecheckerviewer/ListOfRuns.js    |  17 +-
 13 files changed, 596 insertions(+), 289 deletions(-)
 create mode 100644 db_migrate/versions/41c3d07202db_detection_status.py

diff --git a/api/report_server.thrift b/api/report_server.thrift
index 1301299b28..6ac6d97caa 100644
--- a/api/report_server.thrift
+++ b/api/report_server.thrift
@@ -31,6 +31,8 @@ struct RunData{
   6: string runCmd,           // the used check command
   7: optional bool can_delete // true if codeCheckerDBAccess::removeRunResults()
                               // is allowed on this run (see issue 151)
+  8: map<string, i64> detectionStatusCount
+                              // this maps the detection status to its count
 }
 typedef list<RunData> RunDataList

@@ -43,16 +45,17 @@ struct ReviewData{
 //-----------------------------------------------------------------------------
 struct ReportData{
-  1: string checkerId,       // the qualified id of the checker that reported this
-  2: string bugHash,         // This is unique id of the concrete report.
-  3: string checkedFile,     // this is a filepath
-  4: string checkerMsg,      // description of the bug report
-  5: i64 reportId,           // id of the report in the current run in the db
-  6: i64 fileId,             // unique id of the file the report refers to
-  7: shared.BugPathEvent lastBugPosition // This contains the range and message of the last item in the symbolic
-                             // execution step list.
-  8: shared.Severity severity // checker severity
-  9: ReviewData review       // bug review status informations.
+  1: string checkerId,        // the qualified id of the checker that reported this
+  2: string bugHash,          // This is the unique id of the concrete report.
+  3: string checkedFile,      // this is a filepath
+  4: string checkerMsg,       // description of the bug report
+  5: i64 reportId,            // id of the report in the current run in the db
+  6: i64 fileId,              // unique id of the file the report refers to
+  7: shared.BugPathEvent lastBugPosition // This contains the range and message of the last item in the symbolic
+                              // execution step list.
+  8: shared.Severity severity // checker severity
+  9: ReviewData review        // bug review status information.
+  10: string detectionStatus  // 'new', 'resolved', 'unresolved', 'reopened'
 }
 typedef list<ReportData> ReportDataList

@@ -347,13 +350,15 @@ service codeCheckerDBAccess {
   // * If none of them matches a (new file_id, true) is returned.
NeedFileResult needFileContent( 1: string filepath, - 2: string content_hash) + 2: string content_hash, + 3: i64 run_id) throws (1: shared.RequestFailed requestError), bool addFileContent( 1: string content_hash, 2: string file_content, - 3: optional Encoding encoding) + 3: optional Encoding encoding, + 4: i64 run_id) throws (1: shared.RequestFailed requestError), bool finishCheckerRun(1: i64 run_id) diff --git a/db_migrate/versions/41c3d07202db_detection_status.py b/db_migrate/versions/41c3d07202db_detection_status.py new file mode 100644 index 0000000000..d7a7a94cfe --- /dev/null +++ b/db_migrate/versions/41c3d07202db_detection_status.py @@ -0,0 +1,74 @@ +"""detection_status + +Revision ID: 41c3d07202db +Revises: 21a4ca1179da +Create Date: 2017-07-23 17:15:23.289229 + +""" + +# revision identifiers, used by Alembic. +revision = '41c3d07202db' +down_revision = '21a4ca1179da' +branch_labels = None +depends_on = None + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + ### commands auto generated by Alembic - please adjust! ### + op.add_column('bug_path_events', sa.Column('order', sa.Integer(), nullable=True)) + op.add_column('bug_path_events', sa.Column('report_id', sa.Integer(), nullable=True)) + op.create_index(op.f('ix_bug_path_events_report_id'), 'bug_path_events', ['report_id'], unique=False) + op.create_foreign_key(op.f('fk_bug_path_events_report_id_reports'), 'bug_path_events', 'reports', ['report_id'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True) + op.drop_column('bug_path_events', 'prev') + op.drop_column('bug_path_events', 'next') + op.add_column('bug_report_points', sa.Column('order', sa.Integer(), nullable=True)) + op.add_column('bug_report_points', sa.Column('report_id', sa.Integer(), nullable=True)) + op.create_index(op.f('ix_bug_report_points_report_id'), 'bug_report_points', ['report_id'], unique=False) + op.create_foreign_key(op.f('fk_bug_report_points_report_id_reports'), 'bug_report_points', 'reports', ['report_id'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True) + op.drop_column('bug_report_points', 'next') + op.drop_constraint(u'fk_files_run_id_runs', 'files', type_='foreignkey') + op.drop_column('files', 'inc_count') + op.drop_column('files', 'run_id') + op.add_column('reports', sa.Column('detection_status', sa.String(), nullable=True)) + op.drop_index('ix_reports_end_bugevent', table_name='reports') + op.drop_index('ix_reports_start_bugevent', table_name='reports') + op.drop_constraint(u'fk_reports_start_bugpoint_bug_report_points', 'reports', type_='foreignkey') + op.drop_constraint(u'fk_reports_start_bugevent_bug_path_events', 'reports', type_='foreignkey') + op.drop_constraint(u'fk_reports_end_bugevent_bug_path_events', 'reports', type_='foreignkey') + op.drop_column('reports', 'end_bugevent') + op.drop_column('reports', 'start_bugpoint') + op.drop_column('reports', 'start_bugevent') + op.drop_column('runs', 'inc_count') + ### end Alembic commands ### + + +def downgrade(): + ### commands auto generated by Alembic - please adjust! 
###
+    op.add_column('runs', sa.Column('inc_count', sa.INTEGER(), autoincrement=False, nullable=True))
+    op.add_column('reports', sa.Column('start_bugevent', sa.INTEGER(), autoincrement=False, nullable=True))
+    op.add_column('reports', sa.Column('start_bugpoint', sa.INTEGER(), autoincrement=False, nullable=True))
+    op.add_column('reports', sa.Column('end_bugevent', sa.INTEGER(), autoincrement=False, nullable=True))
+    op.create_foreign_key(u'fk_reports_end_bugevent_bug_path_events', 'reports', 'bug_path_events', ['end_bugevent'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True)
+    op.create_foreign_key(u'fk_reports_start_bugevent_bug_path_events', 'reports', 'bug_path_events', ['start_bugevent'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True)
+    op.create_foreign_key(u'fk_reports_start_bugpoint_bug_report_points', 'reports', 'bug_report_points', ['start_bugpoint'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True)
+    op.create_index('ix_reports_start_bugevent', 'reports', ['start_bugevent'], unique=False)
+    op.create_index('ix_reports_end_bugevent', 'reports', ['end_bugevent'], unique=False)
+    op.drop_column('reports', 'detection_status')
+    op.add_column('files', sa.Column('run_id', sa.INTEGER(), autoincrement=False, nullable=True))
+    op.add_column('files', sa.Column('inc_count', sa.INTEGER(), autoincrement=False, nullable=True))
+    op.create_foreign_key(u'fk_files_run_id_runs', 'files', 'runs', ['run_id'], ['id'], ondelete=u'CASCADE', initially=u'DEFERRED', deferrable=True)
+    op.add_column('bug_report_points', sa.Column('next', sa.INTEGER(), autoincrement=False, nullable=True))
+    op.drop_constraint(op.f('fk_bug_report_points_report_id_reports'), 'bug_report_points', type_='foreignkey')
+    op.drop_index(op.f('ix_bug_report_points_report_id'), table_name='bug_report_points')
+    op.drop_column('bug_report_points', 'report_id')
+    op.drop_column('bug_report_points', 'order')
+    op.add_column('bug_path_events', sa.Column('next', sa.INTEGER(), autoincrement=False, nullable=True))
+    op.add_column('bug_path_events', sa.Column('prev', sa.INTEGER(), autoincrement=False, nullable=True))
+    op.drop_constraint(op.f('fk_bug_path_events_report_id_reports'), 'bug_path_events', type_='foreignkey')
+    op.drop_index(op.f('ix_bug_path_events_report_id'), table_name='bug_path_events')
+    op.drop_column('bug_path_events', 'report_id')
+    op.drop_column('bug_path_events', 'order')
+    ### end Alembic commands ###
diff --git a/libcodechecker/analyze/analyzers/result_handler_plist_to_db.py b/libcodechecker/analyze/analyzers/result_handler_plist_to_db.py
index 8b2e9876a9..de7c3d7731 100644
--- a/libcodechecker/analyze/analyzers/result_handler_plist_to_db.py
+++ b/libcodechecker/analyze/analyzers/result_handler_plist_to_db.py
@@ -53,19 +53,21 @@ def __store_bugs(self, files, reports, client):
             hasher = sha256()
             hasher.update(source)
             content_hash = hasher.hexdigest()
-            file_descriptor = client.needFileContent(file_name, content_hash)
+            file_descriptor = client.needFileContent(file_name,
+                                                     content_hash,
+                                                     self.__run_id)
             file_ids[file_name] = file_descriptor.fileId

             if file_descriptor.needed:
                 source64 = base64.b64encode(source)
                 res = client.addFileContent(content_hash,
                                             source64,
-                                            Encoding.BASE64)
+                                            Encoding.BASE64,
+                                            self.__run_id)
                 if not res:
                     LOG.debug("Failed to store file content")

         # Skipping reports in header files handled here.
-        report_ids = []
         for report in reports:
             events = [i for i in report.bug_path if i.get('kind') == 'event']
@@ -174,7 +190,6 @@ def __store_bugs(self, files, reports, client):
                                           severity)
                 LOG.debug("Storing done for report " + str(report_id))
-                report_ids.append(report_id)

             # Check for suppress comment.
             supp = sp_handler.get_suppressed()
@@ -183,7 +198,7 @@ def __store_bugs(self, files, reports, client):
                 status = shared.ttypes.ReviewStatus.UNREVIEWED
                 client.changeReviewStatus(report_id, status, comment)

-    def handle_results(self, client=None):
+    def handle_results(self, client):
         """
         Send the plist content to the database.
         Server API calls should be used in one connection.
diff --git a/libcodechecker/database_handler.py b/libcodechecker/database_handler.py
index e0dfda3bd7..4c199aa546 100644
--- a/libcodechecker/database_handler.py
+++ b/libcodechecker/database_handler.py
@@ -147,9 +147,11 @@ def create_engine(connection_string):

     if make_url(connection_string).drivername == 'sqlite+pysqlite':
         # FIXME: workaround for locking errors
-        return sqlalchemy.create_engine(connection_string,
-                                        encoding='utf8',
-                                        connect_args={'timeout': 600})
+        return sqlalchemy.create_engine(
+            connection_string,
+            encoding='utf8',
+            connect_args={'timeout': 600,
+                          'check_same_thread': False})
     else:
         return sqlalchemy.create_engine(connection_string,
                                         encoding='utf8')
diff --git a/libcodechecker/libclient/thrift_helper.py b/libcodechecker/libclient/thrift_helper.py
index 286011c394..f5a96febbc 100644
--- a/libcodechecker/libclient/thrift_helper.py
+++ b/libcodechecker/libclient/thrift_helper.py
@@ -155,11 +155,11 @@ def addReport(self, run_id, file_id, bug_hash, checker_message, bugpath,
         pass

     @ThriftClientCall
-    def needFileContent(self, filepath, content_hash):
+    def needFileContent(self, filepath, content_hash, run_id):
         pass

     @ThriftClientCall
-    def addFileContent(self, content_hash, content, encoding):
+    def addFileContent(self, content_hash, content, encoding, run_id):
         pass

     @ThriftClientCall
diff --git a/libcodechecker/libhandlers/store.py b/libcodechecker/libhandlers/store.py
index 548fca6e16..0cff2539e7 100644
--- a/libcodechecker/libhandlers/store.py
+++ b/libcodechecker/libhandlers/store.py
@@ -415,19 +415,14 @@ def main(args):
                     1,
                     callback=lambda results: res_handler(results)
                 ).get(float('inf'))
-                pool.close()
-
-            except Exception:
-                pool.terminate()
-                raise  # CodeChecker.py is the invoker, it will handle this.
             finally:
                 pool.join()
                 os.chdir(original_cwd)

-        client.finishCheckerRun(context.run_id)
-
         if len(check_durations) > 0:
             client.setRunDuration(context.run_id,
                                   # Round the duration to seconds.
                                  int(sum(check_durations)))
-        return
+
+    client.finishCheckerRun(context.run_id)
diff --git a/libcodechecker/orm_model.py b/libcodechecker/orm_model.py
index aa6b57705a..d543b4fc87 100644
--- a/libcodechecker/orm_model.py
+++ b/libcodechecker/orm_model.py
@@ -53,7 +53,6 @@ class Run(Base):
     name = Column(String)
     version = Column(String)
     command = Column(String)
-    inc_count = Column(Integer)
     can_delete = Column(Boolean, nullable=False, server_default=true(),
                         default=True)

@@ -65,7 +64,6 @@ def __init__(self, name, version, command):
         self.date, self.name, self.version, self.command = \
             datetime.now(), name, version, command
         self.duration = -1
-        self.inc_count = 0

     def mark_finished(self):
         self.duration = ceil((datetime.now() - self.date).total_seconds())
@@ -191,6 +189,11 @@ class Report(Base):
     # TODO: multiple messages to multiple source locations?
     checker_message = Column(String)
+    detection_status = Column(String)
+    # detection_status = Column(Enum('new',
+    #                                'unresolved',
+    #                                'resolved',
+    #                                'reopened'))

     # Cascade delete might remove rows; SQLAlchemy warns about this.
     # To remove warnings about already deleted items set this to False.
@@ -200,7 +203,7 @@ class Report(Base):
     # Priority/severity etc...
     def __init__(self, run_id, bug_id, file_id, checker_message, checker_id,
-                 checker_cat, bug_type, severity):
+                 checker_cat, bug_type, severity, detection_status):
         self.run_id = run_id
         self.file_id = file_id
         self.bug_id = bug_id
@@ -209,6 +212,7 @@ def __init__(self, run_id, bug_id, file_id, checker_message, checker_id,
         self.checker_id = checker_id
         self.checker_cat = checker_cat
         self.bug_type = bug_type
+        self.detection_status = detection_status


 class SkipPath(Base):
diff --git a/libcodechecker/server/client_db_access_handler.py b/libcodechecker/server/client_db_access_handler.py
index b11536c05f..949272a658 100644
--- a/libcodechecker/server/client_db_access_handler.py
+++ b/libcodechecker/server/client_db_access_handler.py
@@ -12,6 +12,8 @@
 from collections import defaultdict
 import datetime
 import os
+import threading
+import time
 import zlib

 import sqlalchemy
@@ -93,13 +95,87 @@ def bugreportpoint_db_to_api(brp):
         fileId=brp.file_id)


+class StorageSession:
+    '''This class is a singleton which helps to handle a transaction which
+    belongs to the checking of an entire run. This class holds the SQLAlchemy
+    session for the run being checked and the set of touched reports.
This
+    latter is needed so that, at the end, the detection status of the
+    remaining reports can be set to "resolved".'''
+
+    class __StorageSession:
+        def __init__(self):
+            self.__sessions = dict()
+            self._timeout_sessions()
+
+        def start_run_session(self, run_id, transaction):
+            self.__sessions[run_id] = {
+                'touched_reports': set(),
+                'transaction': transaction,
+                'timer': time.time()}
+
+        def end_run_session(self, run_id):
+            this_session = self.__sessions[run_id]
+            transaction = this_session['transaction']
+
+            # Set resolved reports.
+
+            transaction.query(Report) \
+                .filter(Report.run_id == run_id,
+                        Report.id.notin_(this_session['touched_reports'])) \
+                .update({Report.detection_status: 'resolved'},
+                        synchronize_session='fetch')
+
+            transaction.commit()
+            transaction.close()
+
+            del self.__sessions[run_id]
+
+        def abort_session(self, run_id):
+            transaction = self.__sessions[run_id]['transaction']
+            transaction.rollback()
+            transaction.close()
+            del self.__sessions[run_id]
+
+        def touch_report(self, run_id, report_id):
+            self.__sessions[run_id]['touched_reports'].add(report_id)
+
+        def is_touched(self, run_id, report_id):
+            return report_id in self.__sessions[run_id]['touched_reports']
+
+        def has_ongoing_run(self, run_id):
+            return run_id in self.__sessions
+
+        def get_transaction(self, run_id):
+            self.__sessions[run_id]['timer'] = time.time()
+            return self.__sessions[run_id]['transaction']
+
+        def _timeout_sessions(self):
+            for run_id, session in self.__sessions.iteritems():
+                if int(time.time() - session['timer']) > 10:
+                    LOG.info('Session timeout for run ' + str(run_id))
+                    self.abort_session(run_id)
+                    break
+
+            threading.Timer(10, self._timeout_sessions).start()
+
+    instance = None
+
+    def __init__(self):
+        if not StorageSession.instance:
+            StorageSession.instance = \
+                StorageSession.__StorageSession()
+
+    def __getattr__(self, name):
+        return getattr(self.instance, name)
+
+
 class ThriftRequestHandler(object):
     """
     Connect to database and handle thrift client requests.
     """

     def __init__(self,
-                 session,
+                 Session,
                  auth_session,
                  checker_md_docs,
                  checker_md_docs_map,
@@ -112,7 +188,8 @@ def __init__(self,
         self.__checker_doc_map = checker_md_docs_map
         self.__suppress_handler = suppress_handler
         self.__package_version = package_version
-        self.__session = session
+        self.__Session = Session
+        self.__storage_session = StorageSession()
         self.report_ident = sqlalchemy.orm.query.Bundle('report_ident',
                                                         Report.id,
                                                         Report.bug_id,
@@ -124,18 +201,31 @@ def __lastBugEventPos(self, report_id):
         belongs to the given report. If no such event is found then None is
         returned.
""" - last = self.__session.query(BugPathEvent) \ - .filter(BugPathEvent.report_id == report_id) \ - .order_by(BugPathEvent.order.desc()) \ - .limit(1).one_or_none() + try: + session = self.__Session() + + last = session.query(BugPathEvent) \ + .filter(BugPathEvent.report_id == report_id) \ + .order_by(BugPathEvent.order.desc()) \ + .limit(1).all() - if not last: - return None + if len(last) < 1: + return None - bpe = bugpathevent_db_to_api(last) - bpe.filePath = self.__session.query(File).get(bpe.fileId).filepath + last = last[0] - return bpe + bpe = bugpathevent_db_to_api(last) + bpe.filePath = session.query(File).get(bpe.fileId).filepath + + return bpe + except sqlalchemy.exc.SQLAlchemyError as ex: + msg = str(ex) + LOG.error(msg) + raise shared.ttypes.RequestFailed( + shared.ttypes.ErrorCode.DATABASE, + msg) + finally: + session.close() def __sortResultsQuery(self, query, sort_types=None): """ @@ -164,9 +254,10 @@ def __sortResultsQuery(self, query, sort_types=None): @timeit def getRunData(self, run_name_filter): - session = self.__session - results = [] try: + results = [] + session = self.__Session() + # Count the reports subquery. stmt = session.query(Report.run_id, func.count(literal_column('*')).label( @@ -182,6 +273,16 @@ def getRunData(self, run_name_filter): q = q.outerjoin(stmt, Run.id == stmt.c.run_id) \ .order_by(Run.date) + status_q = session.query(Report.run_id, + Report.detection_status, + func.count(literal_column('*')) + .label('status_count')) \ + .group_by(Report.run_id, Report.detection_status) + + status_sum = defaultdict(defaultdict) + for run_id, status, count in status_q: + status_sum[run_id][status] = count + for instance, reportCount in q: if reportCount is None: reportCount = 0 @@ -191,20 +292,25 @@ def getRunData(self, run_name_filter): instance.name, instance.duration, reportCount, - instance.command + instance.command, + None, + status_sum[instance.id] )) return results except sqlalchemy.exc.SQLAlchemyError as alchemy_ex: - LOG.error(str(alchemy_ex)) + msg = str(alchemy_ex) + LOG.error(msg) raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE, - str(alchemy_ex)) + msg) + finally: + session.close() @timeit def getReport(self, reportId): - session = self.__session - try: + session = self.__Session() + result = session.query(Report, File, ReviewStatus) \ @@ -243,7 +349,8 @@ def getReport(self, reportId): lastBugPosition=self.__lastBugEventPos(report.id), checkerId=report.checker_id, severity=report.severity, - review=review_data) + review=review_data, + detectionStatus=report.detection_status) except sqlalchemy.exc.SQLAlchemyError as alchemy_ex: msg = str(alchemy_ex) LOG.error(msg) @@ -253,7 +360,6 @@ def getReport(self, reportId): @timeit def getRunResults(self, run_id, limit, offset, sort_types, report_filters): - max_query_limit = constants.MAX_QUERY_SIZE if limit > max_query_limit: LOG.debug('Query limit ' + str(limit) + @@ -262,10 +368,10 @@ def getRunResults(self, run_id, limit, offset, sort_types, report_filters): str(max_query_limit)) limit = max_query_limit - session = self.__session filter_expression = construct_report_filter(report_filters) try: + session = self.__Session() q = session.query(Report, File, @@ -306,7 +412,8 @@ def getRunResults(self, run_id, limit, offset, sort_types, report_filters): report.id), checkerId=report.checker_id, severity=report.severity, - review=review_data) + review=review_data, + detectionStatus=report.detection_status) ) return results @@ -316,14 +423,17 @@ def getRunResults(self, run_id, limit, offset, 
                      sort_types, report_filters):
             LOG.error(msg)
             raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE,
                                               msg)
+        finally:
+            session.close()

     @timeit
     def getRunResultCount(self, run_id, report_filters):

         filter_expression = construct_report_filter(report_filters)

-        session = self.__session
         try:
+            session = self.__Session()
+
             reportCount = session.query(Report) \
                 .filter(Report.run_id == run_id) \
                 .outerjoin(File, Report.file_id == File.id) \
@@ -340,6 +450,8 @@ def getRunResultCount(self, run_id, report_filters):
             LOG.error(msg)
             raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE,
                                               msg)
+        finally:
+            session.close()

     @timeit
     def __construct_bug_item_list(self, session, report_id, item_type):
@@ -363,8 +475,9 @@ def getReportDetails(self, reportId):
          - reportId
         """
-        session = self.__session
         try:
+            session = self.__Session()
+
             report = session.query(Report).get(reportId)

             events = self.__construct_bug_item_list(session,
@@ -393,14 +506,17 @@ def getReportDetails(self, reportId):
             LOG.error(msg)
             raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE,
                                               msg)
+        finally:
+            session.close()

     @timeit
     def changeReviewStatus(self, report_id, status, message):
         """
         Change review status of the bug by report id.
         """
-        session = self.__session
         try:
+            session = self.__Session()
+
             report = session.query(Report).get(report_id)
             if report:
                 review_status = session.query(ReviewStatus).get(report.bug_id)
@@ -437,8 +553,8 @@ def getComments(self, report_id):
         """
         Return the list of comments for the given bug.
         """
-        session = self.__session
         try:
+            session = self.__Session()
             report = session.query(Report).get(report_id)
             if report:
                 result = []
@@ -474,14 +590,16 @@ def getComments(self, report_id):
             LOG.error(msg)
             raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.IOERROR,
                                               msg)
+        finally:
+            session.close()

     @timeit
     def getCommentCount(self, report_id):
         """
         Return the number of comments for the given bug.
         """
-        session = self.__session
         try:
+            session = self.__Session()
             report = session.query(Report).get(report_id)
             if report:
                 commentCount = session.query(Comment) \
@@ -503,13 +621,15 @@ def getCommentCount(self, report_id):
             LOG.error(msg)
             raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.IOERROR,
                                               msg)
+        finally:
+            session.close()

     @timeit
     def addComment(self, report_id, comment_data):
         """
         Add new comment for the given bug.
         """
-        session = self.__session
+        session = self.__Session()
         try:
             report = session.query(Report).get(report_id)
             if report:
@@ -549,10 +669,12 @@ def updateComment(self, comment_id, content):
         comments to be updated by its original author only, except for
         Anonymous comments that can be updated by anybody.
         """
-        session = self.__session
-        user = self.__auth_session.user\
-            if self.__auth_session else "Anonymous"
         try:
+            session = self.__Session()
+
+            user = self.__auth_session.user \
+                if self.__auth_session else "Anonymous"
+
             comment = session.query(Comment).get(comment_id)
             if comment:
                 if comment.author != 'Anonymous' and comment.author != user:
@@ -580,6 +702,8 @@ def updateComment(self, comment_id, content):
             LOG.error(msg)
             raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.IOERROR,
                                               msg)
+        finally:
+            session.close()

     @timeit
     def removeComment(self, comment_id):
         original author only, except for Anonymous comments that can be
         updated by anybody.
""" - user = self.__auth_session.user\ - if self.__auth_session else "Anonymous" - session = self.__session try: + session = self.__Session() + + user = self.__auth_session.user \ + if self.__auth_session else "Anonymous" + comment = session.query(Comment).get(comment_id) if comment: if comment.author != 'Anonymous' and comment.author != user: @@ -618,6 +744,8 @@ def removeComment(self, comment_id): LOG.error(msg) raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.IOERROR, msg) + finally: + session.close() def getCheckerDoc(self, checkerId): """ @@ -656,8 +784,9 @@ def getCheckerConfigs(self, run_id): Parameters: - run_id """ - session = self.__session try: + session = self.__Session() + configs = session.query(Config) \ .filter(Config.run_id == run_id) \ .all() @@ -675,11 +804,14 @@ def getCheckerConfigs(self, run_id): LOG.error(msg) raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE, msg) + finally: + session.close() @timeit def getSkipPaths(self, run_id): - session = self.__session try: + session = self.__Session() + suppressed_paths = session.query(SkipPath) \ .filter(SkipPath.run_id == run_id) \ .all() @@ -697,6 +829,8 @@ def getSkipPaths(self, run_id): LOG.error(msg) raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE, msg) + finally: + session.close() @timeit def getSourceFileData(self, fileId, fileContent, encoding): @@ -706,8 +840,8 @@ def getSourceFileData(self, fileId, fileContent, encoding): - fileContent - enum Encoding """ - session = self.__session try: + session = self.__Session() sourcefile = session.query(File).get(fileId) if sourcefile is None: @@ -734,12 +868,14 @@ def getSourceFileData(self, fileId, fileContent, encoding): LOG.error(msg) raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE, msg) + finally: + session.close() @timeit def getRunResultTypes(self, run_id, report_filters): - session = self.__session try: + session = self.__Session() filter_expression = construct_report_filter(report_filters) @@ -774,6 +910,8 @@ def getRunResultTypes(self, run_id, report_filters): LOG.error(msg) raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE, msg) + finally: + session.close() # ----------------------------------------------------------------------- @timeit @@ -825,6 +963,9 @@ def __queryDiffResults(self, .outerjoin(File, Report.file_id == File.id) \ .outerjoin(ReviewStatus, ReviewStatus.bug_hash == Report.bug_id) \ + .outerjoin(SuppressBug, + and_(SuppressBug.hash == Report.bug_id, + SuppressBug.run_id == run_id)) \ .filter(Report.bug_id.in_(diff_hash_list)) \ .filter(filter_expression) @@ -857,7 +998,9 @@ def __queryDiffResults(self, lastBugPosition=self.__lastBugEventPos(report.id), checkerId=report.checker_id, severity=report.severity, - review=review_data)) + review=review_data, + detectionStatus=report + .detection_status)) return results @@ -877,7 +1020,7 @@ def getNewResults(self, sort_types=None, report_filters=None): - session = self.__session + session = self.__Session() base_line_hashes, new_check_hashes = \ self.__get_hashes_for_diff(session, @@ -890,15 +1033,19 @@ def getNewResults(self, LOG.debug(diff_hashes) if len(diff_hashes) == 0: + session.close() return [] - return self.__queryDiffResults(session, - diff_hashes, - new_run_id, - limit, - offset, - sort_types, - report_filters) + result = self.__queryDiffResults(session, + diff_hashes, + new_run_id, + limit, + offset, + sort_types, + report_filters) + + session.close() + return result # 
----------------------------------------------------------------------- @timeit @@ -910,7 +1057,7 @@ def getResolvedResults(self, sort_types=None, report_filters=None): - session = self.__session + session = self.__Session() base_line_hashes, new_check_hashes = \ self.__get_hashes_for_diff(session, base_run_id, @@ -922,15 +1069,18 @@ def getResolvedResults(self, LOG.debug(diff_hashes) if len(diff_hashes) == 0: + session.close() return [] - return self.__queryDiffResults(session, - diff_hashes, - base_run_id, - limit, - offset, - sort_types, - report_filters) + result = self.__queryDiffResults(session, + diff_hashes, + base_run_id, + limit, + offset, + sort_types, + report_filters) + session.close() + return result # ----------------------------------------------------------------------- @timeit @@ -942,7 +1092,7 @@ def getUnresolvedResults(self, sort_types=None, report_filters=None): - session = self.__session + session = self.__Session() base_line_hashes, new_check_hashes = \ self.__get_hashes_for_diff(session, base_run_id, @@ -954,15 +1104,17 @@ def getUnresolvedResults(self, LOG.debug(diff_hashes) if len(diff_hashes) == 0: + session.close() return [] - return self.__queryDiffResults(session, - diff_hashes, - new_run_id, - limit, - offset, - sort_types, - report_filters) + result = self.__queryDiffResults(session, + diff_hashes, + new_run_id, + limit, + offset, + sort_types, + report_filters) + return result # ----------------------------------------------------------------------- @timeit @@ -979,7 +1131,7 @@ def getPackageVersion(self): @timeit def removeRunResults(self, run_ids): - session = self.__session + session = self.__Session() runs_to_delete = [] for run_id in run_ids: @@ -1008,6 +1160,7 @@ def removeRunResults(self, run_ids): session.query(FileContent).filter(not_(FileContent.content_hash.in_( select([File.content_hash])))).delete(synchronize_session=False) session.commit() + session.close() return True # ----------------------------------------------------------------------- @@ -1062,40 +1215,47 @@ def getDiffResultCount(self, Count the diff results. 
""" - session = self.__session - base_line_hashes, new_check_hashes = \ - self.__get_hashes_for_diff(session, - base_run_id, - new_run_id) + try: + session = self.__Session() + base_line_hashes, new_check_hashes = \ + self.__get_hashes_for_diff(session, + base_run_id, + new_run_id) + + if diff_type == DiffType.NEW: + diff_hashes = list( + new_check_hashes.difference(base_line_hashes)) + if not diff_hashes: + return 0 + run_id = new_run_id + + elif diff_type == DiffType.RESOLVED: + diff_hashes = list( + base_line_hashes.difference(new_check_hashes)) + if not diff_hashes: + return 0 + run_id = base_run_id + + elif diff_type == DiffType.UNRESOLVED: + diff_hashes = list( + base_line_hashes.intersection(new_check_hashes)) + if not diff_hashes: + return 0 + run_id = new_run_id - if diff_type == DiffType.NEW: - diff_hashes = list(new_check_hashes.difference(base_line_hashes)) - if not diff_hashes: - return 0 - run_id = new_run_id - - elif diff_type == DiffType.RESOLVED: - diff_hashes = list(base_line_hashes.difference(new_check_hashes)) - if not diff_hashes: - return 0 - run_id = base_run_id - - elif diff_type == DiffType.UNRESOLVED: - diff_hashes = list(base_line_hashes.intersection(new_check_hashes)) - if not diff_hashes: - return 0 - run_id = new_run_id - - else: - msg = 'Unsupported diff type: ' + str(diff_type) - LOG.error(msg) - raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE, - msg) + else: + msg = 'Unsupported diff type: ' + str(diff_type) + LOG.error(msg) + raise shared.ttypes.RequestFailed( + shared.ttypes.ErrorCode.DATABASE, + msg) - return self.__queryDiffResultsCount(session, - diff_hashes, - run_id, - report_filters) + return self.__queryDiffResultsCount(session, + diff_hashes, + run_id, + report_filters) + finally: + session.close() # ----------------------------------------------------------------------- def __queryDiffResultTypes(self, @@ -1150,40 +1310,47 @@ def getDiffResultTypes(self, diff_type, report_filters): - session = self.__session - base_line_hashes, new_check_hashes = \ - self.__get_hashes_for_diff(session, - base_run_id, - new_run_id) + try: + session = self.__Session() + base_line_hashes, new_check_hashes = \ + self.__get_hashes_for_diff(session, + base_run_id, + new_run_id) + + if diff_type == DiffType.NEW: + diff_hashes = list( + new_check_hashes.difference(base_line_hashes)) + if not diff_hashes: + return diff_hashes + run_id = new_run_id + + elif diff_type == DiffType.RESOLVED: + diff_hashes = list( + base_line_hashes.difference(new_check_hashes)) + if not diff_hashes: + return diff_hashes + run_id = base_run_id + + elif diff_type == DiffType.UNRESOLVED: + diff_hashes = list( + base_line_hashes.intersection(new_check_hashes)) + if not diff_hashes: + return diff_hashes + run_id = new_run_id - if diff_type == DiffType.NEW: - diff_hashes = list(new_check_hashes.difference(base_line_hashes)) - if not diff_hashes: - return diff_hashes - run_id = new_run_id - - elif diff_type == DiffType.RESOLVED: - diff_hashes = list(base_line_hashes.difference(new_check_hashes)) - if not diff_hashes: - return diff_hashes - run_id = base_run_id - - elif diff_type == DiffType.UNRESOLVED: - diff_hashes = list(base_line_hashes.intersection(new_check_hashes)) - if not diff_hashes: - return diff_hashes - run_id = new_run_id - - else: - msg = 'Unsupported diff type: ' + str(diff_type) - LOG.error(msg) - raise shared.ttypes.RequestFailed(shared.ttypes.ErrorCode.DATABASE, - msg) + else: + msg = 'Unsupported diff type: ' + str(diff_type) + LOG.error(msg) + raise 
shared.ttypes.RequestFailed(
+                    shared.ttypes.ErrorCode.DATABASE,
+                    msg)

-        return self.__queryDiffResultTypes(session,
-                                           diff_hashes,
-                                           run_id,
-                                           report_filters)
+            return self.__queryDiffResultTypes(session,
+                                               diff_hashes,
+                                               run_id,
+                                               report_filters)
+        finally:
+            session.close()

     @timeit
     def addCheckerRun(self, command, name, version, force):
@@ -1194,7 +1361,13 @@ def addCheckerRun(self, command, name, version, force):
         """
         try:
             LOG.debug("adding checker run")
-            run = self.__session.query(Run).filter(Run.name == name).first()
+
+            session = self.__Session()
+            run = session.query(Run).filter(Run.name == name).first()
+
+            if run and self.__storage_session.has_ongoing_run(run.id):
+                raise Exception('Storage of ' + name +
+                                ' is already in progress')
+
             if run and force:
                 # Clean already collected results.
                 if not run.can_delete:
@@ -1206,29 +1379,31 @@ def addCheckerRun(self, command, name, version, force):
                                                       msg)

                 LOG.info('Removing previous analysis results ...')
-                self.__session.delete(run)
-                self.__session.commit()
+                session.delete(run)

                 checker_run = Run(name, version, command)
-                self.__session.add(checker_run)
-                self.__session.commit()
-                return checker_run.id
+                session.add(checker_run)
+                session.flush()
+                run_id = checker_run.id

             elif run:
                 # There is already a run, update the results.
                 run.date = datetime.now()
-                # Increment update counter and the command.
-                run.inc_count += 1
+                # Update the check command.
                 run.command = command
-                self.__session.commit()
-                return run.id
+                session.flush()
+                run_id = run.id

             else:
                 # There is no run create new.
                 checker_run = Run(name, version, command)
-                self.__session.add(checker_run)
-                self.__session.commit()
-                return checker_run.id
+                session.add(checker_run)
+                session.flush()
+                run_id = checker_run.id
+
+            self.__storage_session.start_run_session(run_id, session)
+            return run_id
         except Exception as ex:
+            session.close()
             raise shared.ttypes.RequestFailed(
                 shared.ttypes.ErrorCode.GENERAL,
                 str(ex))
@@ -1238,13 +1413,18 @@ def finishCheckerRun(self, run_id):
         """
         """
         try:
+            session = self.__storage_session.get_transaction(run_id)
+
             LOG.debug("Finishing checker run")
-            run = self.__session.query(Run).get(run_id)
+            run = session.query(Run).get(run_id)
             if not run:
+                self.__storage_session.abort_session(run_id)
                 return False

             run.mark_finished()
-            self.__session.commit()
+
+            self.__storage_session.end_run_session(run_id)
+
             return True

         except Exception as ex:
@@ -1256,13 +1436,14 @@ def setRunDuration(self, run_id, duration):
         """
         """
         try:
+            session = self.__storage_session.get_transaction(run_id)
+
             LOG.debug("setting run duration")
-            run = self.__session.query(Run).get(run_id)
+            run = session.query(Run).get(run_id)
             if not run:
                 return False

             run.duration = duration
-            self.__session.commit()
             return True
         except Exception as ex:
             LOG.error(ex)
@@ -1275,8 +1456,9 @@ def replaceConfigInfo(self, run_id, config_values):
         new values.
""" try: + session = self.__Session() LOG.debug("Replacing config info") - count = self.__session.query(Config) \ + count = session.query(Config) \ .filter(Config.run_id == run_id) \ .delete() LOG.debug('Config: ' + str(count) + ' removed item.') @@ -1284,75 +1466,76 @@ def replaceConfigInfo(self, run_id, config_values): configs = [Config( run_id, info.checker_name, info.attribute, info.value) for info in config_values] - self.__session.bulk_save_objects(configs) - self.__session.commit() + session.bulk_save_objects(configs) + session.commit() return True except Exception as ex: LOG.error(ex) return False + finally: + session.close() @timeit - def needFileContent(self, filepath, content_hash): + def needFileContent(self, filepath, content_hash, run_id): """ """ - f = self.__session.query(File) \ + session = self.__storage_session.get_transaction(run_id) + f = session.query(File) \ .filter(and_(File.content_hash == content_hash, File.filepath == filepath)) \ .one_or_none() - needed = self.__session.query(FileContent).get(content_hash) is None + needed = session.query(FileContent).get(content_hash) is None if not f or needed: try: - self.__session.commit() if needed: # Avoid foreign key errors: add empty content. file_content = FileContent(content_hash, "") - self.__session.add(file_content) + session.add(file_content) f = File(filepath, content_hash) - self.__session.add(f) - self.__session.commit() + session.add(f) + session.flush() except sqlalchemy.exc.IntegrityError: # Other transaction might added the same file in the meantime. - self.__session.rollback() + session.rollback() return NeedFileResult(needed, f.id) @timeit - def addFileContent(self, content_hash, content, encoding): + def addFileContent(self, content_hash, content, encoding, run_id): """ """ + session = self.__storage_session.get_transaction(run_id) if encoding == Encoding.BASE64: content = base64.b64decode(content) compressed_content = zlib.compress(content, zlib.Z_BEST_COMPRESSION) - self.__session.commit() try: - file_content = self.__session.query(FileContent).get(content_hash) + file_content = session.query(FileContent).get(content_hash) file_content.content = compressed_content - self.__session.add(file_content) - self.__session.commit() + session.add(file_content) + session.flush() except sqlalchemy.exc.IntegrityError as ex: # Other transaction might added the same content in the meantime. - self.__session.rollback() + session.rollback() return False return True - def __is_same_event_path(self, report_id, events): + def __is_same_event_path(self, report_id, events, session): """ Checks if the given event path is the same as the one in the events argument. 
""" try: - q = self.__session.query(BugPathEvent) \ + q = session.query(BugPathEvent) \ .filter(BugPathEvent.report_id == report_id) \ .order_by(BugPathEvent.order) - len_events = len(events) for i, point2 in enumerate(q): - if i == len_events: + if i == len(events): return False point1 = events[i] @@ -1372,6 +1555,7 @@ def __is_same_event_path(self, report_id, events): @timeit def storeReportInfo(self, + session, run_id, file_id, bug_hash, @@ -1381,12 +1565,13 @@ def storeReportInfo(self, checker_id, checker_cat, bug_type, - severity): + severity, + detection_status='new'): """ """ try: LOG.debug("getting source file for report") - source_file = self.__session.query(File).get(file_id) + source_file = session.query(File).get(file_id) _, source_file_name = os.path.split(source_file.filepath) LOG.debug("initializing report") @@ -1397,17 +1582,16 @@ def storeReportInfo(self, checker_id, checker_cat, bug_type, - severity) + severity, + detection_status) - self.__session.add(report) - self.__session.flush() + session.add(report) + session.flush() LOG.debug("storing bug path") - self.__storeBugPath(bugpath, report.id) + self.__storeBugPath(session, bugpath, report.id) LOG.debug("storing events") - self.__storeBugEvents(events, report.id) - - self.__session.commit() + self.__storeBugEvents(session, events, report.id) return report.id except Exception as ex: @@ -1430,63 +1614,74 @@ def addReport(self, """ """ try: + session = self.__storage_session.get_transaction(run_id) + checker_id = checker_id or 'NOT FOUND' # TODO: performance issues when executing the following query on # large databases? - reports = self.__session.query(self.report_ident) \ + reports = session.query(self.report_ident) \ .filter(and_(self.report_ident.c.bug_id == bug_hash, self.report_ident.c.run_id == run_id)) - try: - # Check for duplicates by bug hash. - LOG.debug("checking duplicates") - if reports.count() != 0: - for possib_dup in reports: - LOG.debug("there is a possible duplicate") - # It's a duplicate or a hash clash. Check checker name, - # file id, and position. - dup_report_obj = self.__session.query(Report).get( - possib_dup.report_ident.id) - if dup_report_obj and \ - dup_report_obj.checker_id == checker_id and \ - dup_report_obj.file_id == file_id and \ - self.__is_same_event_path( - dup_report_obj.id, events): - # TODO: It is not clear why this commit is needed - # but if it is not here then the commit in - # finishCheckerRun() hangs. - self.__session.commit() - return dup_report_obj.id - - LOG.debug("no duplicate storing report") - return self.storeReportInfo(run_id, - file_id, - bug_hash, - msg, - bugpath, - events, - checker_id, - checker_cat, - bug_type, - severity) - - except sqlalchemy.exc.IntegrityError as ex: - self.__session.rollback() - - reports = self.__session.query(self.report_ident) \ - .filter(and_(self.report_ident.c.bug_id == bug_hash, - self.report_ident.c.run_id == run_id)) - if reports.count() != 0: - return reports.first().report_ident.id - else: - raise + # Check for duplicates by bug hash. + LOG.debug("checking duplicates") + for possib_dup in reports: + LOG.debug("there is a possible duplicate") + dup_report_obj = session.query(Report).get( + possib_dup.report_ident.id) + # TODO: file_id and path equality check won't be necessary + # when path hash is added. 
+ if dup_report_obj and \ + dup_report_obj.checker_id == checker_id and \ + dup_report_obj.file_id == file_id and \ + self.__is_same_event_path( + dup_report_obj.id, + events, + session): + + new_status = None + + if dup_report_obj.detection_status == 'new' and not \ + self.__storage_session.is_touched( + run_id, + dup_report_obj.id): + new_status = 'unresolved' + elif dup_report_obj.detection_status == 'resolved': + new_status = 'reopened' + + if new_status: + dup_report_obj.detection_status = new_status + + self.__storage_session.touch_report( + run_id, + dup_report_obj.id) + + return dup_report_obj.id + + LOG.debug("no duplicate storing report") + report_id = self.storeReportInfo(session, + run_id, + file_id, + bug_hash, + msg, + bugpath, + events, + checker_id, + checker_cat, + bug_type, + severity, + suppress) + + self.__storage_session.touch_report(run_id, report_id) + + return report_id + except Exception as ex: - self.__session.rollback() raise shared.ttypes.RequestFailed( shared.ttypes.ErrorCode.GENERAL, str(ex)) - def __storeBugEvents(self, bugevents, report_id): + def __storeBugEvents(self, session, bugevents, report_id): """ """ for i, event in enumerate(bugevents): @@ -1498,10 +1693,9 @@ def __storeBugEvents(self, bugevents, report_id): event.msg, event.fileId, report_id) + session.add(bpe) - self.__session.add(bpe) - - def __storeBugPath(self, bugpath, report_id): + def __storeBugPath(self, session, bugpath, report_id): for i, piece in enumerate(bugpath): brp = BugReportPoint(piece.startLine, piece.startCol, @@ -1510,15 +1704,16 @@ def __storeBugPath(self, bugpath, report_id): i, piece.fileId, report_id) - - self.__session.add(brp) + session.add(brp) @timeit def addSkipPath(self, run_id, paths): """ """ try: - count = self.__session.query(SkipPath) \ + session = self.__storage_session.get_transaction(run_id) + + count = session.query(SkipPath) \ .filter(SkipPath.run_id == run_id) \ .delete() LOG.debug('SkipPath: ' + str(count) + ' removed item.') @@ -1527,8 +1722,7 @@ def addSkipPath(self, run_id, paths): for path, comment in paths.items(): skipPath = SkipPath(run_id, path, comment) skipPathList.append(skipPath) - self.__session.bulk_save_objects(skipPathList) - self.__session.commit() + session.bulk_save_objects(skipPathList) return True except Exception as ex: LOG.error(str(ex)) diff --git a/libcodechecker/server/client_db_access_server.py b/libcodechecker/server/client_db_access_server.py index 84f116aa48..4fab036654 100644 --- a/libcodechecker/server/client_db_access_server.py +++ b/libcodechecker/server/client_db_access_server.py @@ -51,7 +51,7 @@ class RequestHandler(SimpleHTTPRequestHandler): """ def __init__(self, request, client_address, server): - self.sc_session = server.sc_session + self.Session = server.Session self.db_version_info = server.db_version_info self.manager = server.manager @@ -189,8 +189,6 @@ def do_POST(self): # Authentication is handled, we may now respond to the user. try: - session = self.sc_session() - if self.path == '/Authentication': # Authentication requests must be routed to a different # handler. 
@@ -204,7 +202,8 @@ def do_POST(self): else: LOG.debug("Unauthenticated access.") - acc_handler = ThriftRequestHandler(session, auth_session, + acc_handler = ThriftRequestHandler(self.Session, + auth_session, checker_md_docs, checker_md_docs_map, suppress_handler, @@ -216,8 +215,6 @@ def do_POST(self): processor.process(iprot, oprot) result = otrans.getvalue() - self.sc_session.remove() - self.send_response(200) self.send_header("content-type", "application/x-thrift") self.send_header("Content-Length", len(result)) @@ -284,9 +281,7 @@ def __init__(self, self.__engine = database_handler.SQLServer.create_engine( db_conn_string) - Session = scoped_session(sessionmaker()) - Session.configure(bind=self.__engine) - self.sc_session = Session + self.Session = sessionmaker(bind=self.__engine) self.manager = manager self.__request_handlers = ThreadPool(processes=10) diff --git a/tests/functional/report_viewer_api/test_hash_clash.py b/tests/functional/report_viewer_api/test_hash_clash.py index badb486faa..8fd82b2dca 100644 --- a/tests/functional/report_viewer_api/test_hash_clash.py +++ b/tests/functional/report_viewer_api/test_hash_clash.py @@ -66,7 +66,7 @@ def _create_run(self, name): 'v', False) - def _create_file(self, name, cols=10, lines=10): + def _create_file(self, name, run_id, cols=10, lines=10): """Creates a new file with random content.""" path = name + "_" + str(uuid4()) @@ -74,12 +74,12 @@ def _create_file(self, name, cols=10, lines=10): hasher = sha256() hasher.update(content) content_hash = hasher.hexdigest() - need = self._report.needFileContent(path, content_hash) + need = self._report.needFileContent(path, content_hash, run_id) self.assertTrue(need.needed) content = base64.b64encode(content) success = self._report.addFileContent(content_hash, content, - Encoding.BASE64) + Encoding.BASE64, run_id) self.assertTrue(success) return need.fileId, path @@ -123,7 +123,7 @@ def _init_new_test(self, name): """ run_id = self._create_run(name) - file_id, source_file = self._create_file(name) + file_id, source_file = self._create_file(name, run_id) # analyzer type needs to match with the supported analyzer types # clangsa is used for testing @@ -140,10 +140,8 @@ def test_hash_clash(self): - Hash clash in different build actions. 
""" - with self._init_new_test('test1') as ids1, \ - self._init_new_test('test2') as ids2: + with self._init_new_test('test1') as ids1: run_id1, file_id1, _ = ids1 - run_id2, file_id2, source_file2 = ids2 rep_id1 = self._create_simple_report(file_id1, run_id1, 'XXX', @@ -162,6 +160,16 @@ def test_hash_clash(self): # Same file, same hash and different position in line self.assertEqual(rep_id1, rep_id3) + rep_id6 = self._create_simple_report(file_id1, + run_id1, + 'XXX', + ((1, 3), (1, 4))) + + # Same file, same hash and different position in column + self.assertNotEqual(rep_id1, rep_id6) + + with self._init_new_test('test2') as ids2: + run_id2, file_id2, source_file2 = ids2 rep_id4 = self._create_simple_report(file_id2, run_id2, 'XXX', @@ -170,6 +178,10 @@ def test_hash_clash(self): run_id2, 'YYY', ((1, 1), (1, 2))) + rep_id7 = self._create_simple_report(file_id1, + run_id2, + 'XXX', + ((1, 1), (1, 2))) # Different file, same hash, and position self.assertNotEqual(rep_id1, rep_id4) # Different file, same hash and different position @@ -177,8 +189,4 @@ def test_hash_clash(self): # Same file and position, different hash self.assertNotEqual(rep_id4, rep_id5) - rep_id7 = self._create_simple_report(file_id1, - run_id2, - 'XXX', - ((1, 1), (1, 2))) self.assertNotEqual(rep_id1, rep_id7) diff --git a/tests/functional/update/test_update_mode.py b/tests/functional/update/test_update_mode.py index dbc12f1f7f..88003ec220 100644 --- a/tests/functional/update/test_update_mode.py +++ b/tests/functional/update/test_update_mode.py @@ -79,12 +79,10 @@ def test_disable_checker(self): updated_results = get_all_run_results(self._cc_client, self._runid) all_bugs = self._testproject_data[self._clang_to_test]['bugs'] - deadcode_bugs = [bug for bug in all_bugs if bug['checker'] == deadcode] + deadcode_bugs = \ + [bug['hash'] for bug in all_bugs if bug['checker'] == deadcode] - # TODO: By removing build actions from the architecture, there is no - # update mode. If a run already contains a given bug then it is not - # overwritten. Later in "detection status" functionality the status of - # these bugs will be set to "resolved" and that will be checked. 
        self.assertEquals(len(updated_results), len(all_bugs))

-        # self.assertEquals(len(updated_results),
-        #                   len(all_bugs) - len(deadcode_bugs))
+        self.assertTrue(all(map(
+            lambda b: b.detectionStatus == 'unresolved',
+            filter(lambda x: x.bugHash in deadcode_bugs, updated_results))))
diff --git a/www/scripts/codecheckerviewer/ListOfBugs.js b/www/scripts/codecheckerviewer/ListOfBugs.js
index 1a0a958fd9..766e4274b6 100644
--- a/www/scripts/codecheckerviewer/ListOfBugs.js
+++ b/www/scripts/codecheckerviewer/ListOfBugs.js
@@ -210,6 +210,7 @@ function (declare, dom, Deferred, ObjectStore, Store, QueryResults, topic,
       { name : 'Severity', field : 'severity' },
       { name : 'Review status', field : 'reviewStatusHtml', cellClasses : 'review-status', width : '25%' },
-      { name : 'Review comment', cellClasses : 'review-comment-message compact', field : 'reviewComment', width : '50%' }
+      { name : 'Review comment', cellClasses : 'review-comment-message compact', field : 'reviewComment', width : '50%' },
+      { name : 'Detection status', field : 'detectionStatus' }
     ];

     this.focused = true;
@@ -233,7 +234,8 @@ function (declare, dom, Deferred, ObjectStore, Store, QueryResults, topic,

       return cell.field === 'checkedFile' ||
              cell.field === 'checkerId' ||
-             cell.field === 'severity';
+             cell.field === 'severity' ||
+             cell.field === 'detectionStatus';
     },

     scrollToLastSelected : function () {
diff --git a/www/scripts/codecheckerviewer/ListOfRuns.js b/www/scripts/codecheckerviewer/ListOfRuns.js
index bf53eb8f8b..829750b4c9 100644
--- a/www/scripts/codecheckerviewer/ListOfRuns.js
+++ b/www/scripts/codecheckerviewer/ListOfRuns.js
@@ -71,6 +71,15 @@ function (declare, domConstruct, ItemFileWriteStore, topic, Dialog, Button,
     return container;
   }

+  function prettifyStatus(statusCounts) {
+    var stat = [];
+
+    for (var statusCount in statusCounts)
+      stat.push(statusCount + ' (' + statusCounts[statusCount] + ')');
+
+    return stat.join(', ');
+  }
+
   var ListOfRunsGrid = declare(DataGrid, {
     constructor : function () {
       this.store = new ItemFileWriteStore({
@@ -85,6 +94,7 @@ function (declare, domConstruct, ItemFileWriteStore, topic, Dialog, Button,
         { name : 'Number of bugs', field : 'numberofbugs', styles : 'text-align: center;', width : '20%' },
         { name : 'Duration', field : 'duration', styles : 'text-align: center;' },
         { name : 'Check command', field : 'checkcmd', styles : 'text-align: center;' },
+        { name : 'Detection status', field : 'detectionstatus', styles : 'text-align: center;', width : '30%' },
         { name : 'Delete', field : 'del', styles : 'text-align: center;', type : 'dojox.grid.cells.Bool', editable : true }
       ];

@@ -179,7 +189,8 @@ function (declare, domConstruct, ItemFileWriteStore, topic, Dialog, Button,
         runData : runData,
         checkcmd : 'Show',
         del : false,
-        diff : { 'runData' : runData, 'listOfRunsGrid' : this }
+        diff : { 'runData' : runData, 'listOfRunsGrid' : this },
+        detectionstatus : prettifyStatus(runData.detectionStatusCount)
       });
     },

@@ -333,4 +348,4 @@ function (declare, domConstruct, ItemFileWriteStore, topic, Dialog, Button,
     onLoaded : function (runDataList) {}
   });

-});
\ No newline at end of file
+});
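
With this patch a store gets an explicit lifecycle on the wire: addCheckerRun() opens a per-run transaction inside the StorageSession singleton, every needFileContent()/addFileContent()/addReport() call now carries the run_id so the server can route it to that transaction (and refresh its timeout), and finishCheckerRun() commits and bulk-resolves whatever the store never touched. A minimal client-side sketch of that flow, assuming a connected thrift_helper client; the store_source() helper and the import path of Encoding are illustrative, not part of the patch:

import base64
from hashlib import sha256

# Assumed location of the thrift-generated types; the diff itself only
# refers to the bare name `Encoding`.
from codeCheckerDBAccess.ttypes import Encoding


def store_source(client, run_id, file_path, source):
    """Upload a source file only when its content is not stored yet."""
    content_hash = sha256(source).hexdigest()

    file_descriptor = client.needFileContent(file_path, content_hash, run_id)
    if file_descriptor.needed:
        client.addFileContent(content_hash,
                              base64.b64encode(source),
                              Encoding.BASE64,
                              run_id)
    return file_descriptor.fileId


# run_id = client.addCheckerRun(command, name, version, force)
# ... store_source() and client.addReport(...) for every parsed result ...
# client.finishCheckerRun(run_id)  # commit; untouched reports -> 'resolved'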
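The detection-status rules themselves are split between two places: addReport() decides the status of reports that show up again, and StorageSession.end_run_session() bulk-resolves everything the store never touched. Collapsed into one pure function for reference (this restatement is not code from the patch):

def next_detection_status(previous, stored_again, touched_before):
    """Return the detection status a report ends up with after a store.

    previous       -- status stored in the database, None for a new report
    stored_again   -- True if the current store contains this report
    touched_before -- True if this store already touched the report
    """
    if previous is None:
        return 'new'          # storeReportInfo() default
    if not stored_again:
        return 'resolved'     # applied in bulk by end_run_session()
    if previous == 'new' and not touched_before:
        return 'unresolved'   # came from an earlier store, still present
    if previous == 'resolved':
        return 'reopened'     # disappeared once, now it is back
    return previous           # 'unresolved' and 'reopened' are kept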
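Because the per-run transaction now stays open across many Thrift calls, the server protects itself against clients that die mid-store: get_transaction() refreshes a per-run timestamp, and a threading.Timer sweep aborts sessions idle for more than ten seconds. The same pattern in isolation (class and names are illustrative; note the daemon flag, which the timer in the patch does not set, so a server shutdown may have to wait for the last tick):

import threading
import time


class SessionWatchdog(object):
    """Periodically abort entries that were not refreshed in time."""

    def __init__(self, timeout, abort):
        self._timeout = timeout
        self._abort = abort          # callback, e.g. abort_session
        self._last_seen = {}
        self._sweep()

    def refresh(self, key):
        self._last_seen[key] = time.time()

    def remove(self, key):
        self._last_seen.pop(key, None)

    def _sweep(self):
        now = time.time()
        # Iterate over a copy so aborting may delete entries safely.
        for key, seen in list(self._last_seen.items()):
            if now - seen > self._timeout:
                self._abort(key)
                self._last_seen.pop(key, None)
        timer = threading.Timer(self._timeout, self._sweep)
        timer.daemon = True          # do not block interpreter shutdown
        timer.start()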
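Outside the storage path, the handler switches from one long-lived scoped_session to a sessionmaker factory: every read-only API call opens its own session and closes it in a finally block. The pattern every converted method follows, assuming the names (Report, shared, LOG, self.__Session) from the module; creating the session before the try block avoids referencing an unbound name in finally should the factory itself fail, which the patch does just inside the try:

# Pattern of the converted query methods, e.g. getRunResultCount().
def getSomething(self, run_id):
    session = self.__Session()
    try:
        return session.query(Report) \
            .filter(Report.run_id == run_id) \
            .count()
    except sqlalchemy.exc.SQLAlchemyError as ex:
        msg = str(ex)
        LOG.error(msg)
        raise shared.ttypes.RequestFailed(
            shared.ttypes.ErrorCode.DATABASE, msg)
    finally:
        session.close()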
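getDiffResultCount() and getDiffResultTypes() both reduce to the same set algebra over the two runs' bug-hash sets; only the run whose reports are queried afterwards differs. A pure-Python restatement of the three DiffType branches (DiffType is the API enum; the function is for reference only):

def select_diff_hashes(diff_type, base_hashes, new_hashes):
    """Mirror of the DiffType branches in the diff query methods."""
    if diff_type == DiffType.NEW:
        return new_hashes - base_hashes   # queried against new_run_id
    if diff_type == DiffType.RESOLVED:
        return base_hashes - new_hashes   # queried against base_run_id
    if diff_type == DiffType.UNRESOLVED:
        return base_hashes & new_hashes   # queried against new_run_id
    raise ValueError('Unsupported diff type: ' + str(diff_type))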
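One operational note on the migration: 41c3d07202db adds reports.detection_status as a nullable column without a backfill, so reports stored before the upgrade carry NULL until their run is stored again. If a deployment wanted old rows to show a definite status immediately, a backfill could be appended to upgrade() along these lines (hypothetical, not part of the patch; 'unresolved' is an assumed choice):

# Hypothetical addition to upgrade(); NOT part of this migration.
op.execute("UPDATE reports "
           "SET detection_status = 'unresolved' "
           "WHERE detection_status IS NULL")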