Skip to content

Add the ability to only create finding groups when you have more than one grouped finding #6916

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Oct 26, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion dojo/engagement/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -627,6 +627,7 @@ def import_scan_results(request, eid=None, pid=None):
environment = Development_Environment.objects.get(id=environment_id)

group_by = form.cleaned_data.get('group_by', None)
create_finding_groups_for_all_findings = form.cleaned_data['create_finding_groups_for_all_findings']

# TODO move to form validation?
if scan and is_scan_file_too_large(scan):
Expand Down Expand Up @@ -668,7 +669,8 @@ def import_scan_results(request, eid=None, pid=None):
test, finding_count, closed_finding_count, _ = importer.import_scan(scan, scan_type, engagement, user, environment, active=active, verified=verified, tags=tags,
minimum_severity=minimum_severity, endpoints_to_add=list(form.cleaned_data['endpoints']) + added_endpoints, scan_date=scan_date,
version=version, branch_tag=branch_tag, build_id=build_id, commit_hash=commit_hash, push_to_jira=push_to_jira,
close_old_findings=close_old_findings, group_by=group_by, api_scan_configuration=api_scan_configuration, service=service)
close_old_findings=close_old_findings, group_by=group_by, api_scan_configuration=api_scan_configuration, service=service,
create_finding_groups_for_all_findings=create_finding_groups_for_all_findings)

message = f'{scan_type} processed a total of {finding_count} findings'

Expand Down
1 change: 1 addition & 0 deletions dojo/forms.py
Original file line number Diff line number Diff line change
Expand Up @@ -426,6 +426,7 @@ class ImportScanForm(forms.Form):

if is_finding_groups_enabled():
group_by = forms.ChoiceField(required=False, choices=Finding_Group.GROUP_BY_OPTIONS, help_text='Choose an option to automatically group new findings by the chosen option.')
create_finding_groups_for_all_findings = forms.BooleanField(help_text="If unchecked, finding groups will only be created when there is more than one grouped finding", required=False, initial=True)

def __init__(self, *args, **kwargs):
super(ImportScanForm, self).__init__(*args, **kwargs)
Expand Down
37 changes: 28 additions & 9 deletions dojo/importers/importer/importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,12 +60,15 @@ def create_test(self, scan_type, test_type_name, engagement, lead, environment,
@dojo_async_task
@app.task(ignore_result=False)
def process_parsed_findings(self, test, parsed_findings, scan_type, user, active, verified, minimum_severity=None,
endpoints_to_add=None, push_to_jira=None, group_by=None, now=timezone.now(), service=None, scan_date=None, **kwargs):
endpoints_to_add=None, push_to_jira=None, group_by=None, now=timezone.now(), service=None, scan_date=None,
create_finding_groups_for_all_findings=True, **kwargs):
logger.debug('endpoints_to_add: %s', endpoints_to_add)
new_findings = []
items = parsed_findings
logger.debug('starting import of %i items.', len(items) if items else 0)
i = 0
group_names_to_findings_dict = {}

for item in items:
# FIXME hack to remove when all parsers have unit tests for this attribute
if item.severity.lower().startswith('info') and item.severity != 'Info':
Expand Down Expand Up @@ -109,7 +112,13 @@ def process_parsed_findings(self, test, parsed_findings, scan_type, user, active
item.save(dedupe_option=False)

if is_finding_groups_enabled() and group_by:
finding_helper.add_finding_to_auto_group(item, group_by, **kwargs)
# If finding groups are enabled, group all findings by group name
name = finding_helper.get_group_by_group_name(item, group_by)
if name is not None:
if name in group_names_to_findings_dict:
group_names_to_findings_dict[name].append(item)
else:
group_names_to_findings_dict[name] = [item]

if (hasattr(item, 'unsaved_req_resp') and
len(item.unsaved_req_resp) > 0):
Expand Down Expand Up @@ -159,14 +168,22 @@ def process_parsed_findings(self, test, parsed_findings, scan_type, user, active

new_findings.append(item)
# to avoid pushing a finding group multiple times, we push those outside of the loop
if is_finding_groups_enabled() and item.finding_group:
if is_finding_groups_enabled() and group_by:
item.save()
else:
item.save(push_to_jira=push_to_jira)

if is_finding_groups_enabled() and push_to_jira:
for finding_group in set([finding.finding_group for finding in new_findings if finding.finding_group is not None]):
jira_helper.push_to_jira(finding_group)
for (group_name, findings) in group_names_to_findings_dict.items():
# Only create a finding group if we have more than one finding for a given finding group, unless configured otherwise
if create_finding_groups_for_all_findings or len(findings) > 1:
for finding in findings:
finding_helper.add_finding_to_auto_group(finding, group_by, **kwargs)
if push_to_jira:
if findings[0].finding_group is not None:
jira_helper.push_to_jira(findings[0].finding_group)
else:
jira_helper.push_to_jira(findings[0])

sync = kwargs.get('sync', False)
if not sync:
return [serializers.serialize('json', [finding, ]) for finding in new_findings]
Expand Down Expand Up @@ -232,7 +249,7 @@ def close_old_findings(self, test, scan_date_time, user, push_to_jira=None, serv
def import_scan(self, scan, scan_type, engagement, lead, environment, active, verified, tags=None, minimum_severity=None,
user=None, endpoints_to_add=None, scan_date=None, version=None, branch_tag=None, build_id=None,
commit_hash=None, push_to_jira=None, close_old_findings=False, group_by=None, api_scan_configuration=None,
service=None, title=None):
service=None, title=None, create_finding_groups_for_all_findings=True):

logger.debug(f'IMPORT_SCAN: parameters: {locals()}')

Expand Down Expand Up @@ -307,7 +324,8 @@ def import_scan(self, scan, scan_type, engagement, lead, environment, active, ve
result = self.process_parsed_findings(test, findings_list, scan_type, user, active,
verified, minimum_severity=minimum_severity,
endpoints_to_add=endpoints_to_add, push_to_jira=push_to_jira,
group_by=group_by, now=now, service=service, scan_date=scan_date, sync=False)
group_by=group_by, now=now, service=service, scan_date=scan_date, sync=False,
create_finding_groups_for_all_findings=create_finding_groups_for_all_findings)
# Since I dont want to wait until the task is done right now, save the id
# So I can check on the task later
results_list += [result]
Expand All @@ -324,7 +342,8 @@ def import_scan(self, scan, scan_type, engagement, lead, environment, active, ve
new_findings = self.process_parsed_findings(test, parsed_findings, scan_type, user, active,
verified, minimum_severity=minimum_severity,
endpoints_to_add=endpoints_to_add, push_to_jira=push_to_jira,
group_by=group_by, now=now, service=service, scan_date=scan_date, sync=True)
group_by=group_by, now=now, service=service, scan_date=scan_date, sync=True,
create_finding_groups_for_all_findings=create_finding_groups_for_all_findings)

closed_findings = []
if close_old_findings:
Expand Down