Added a feature to list all projects and team names in report #50

Open · wants to merge 3 commits into master
44 changes: 44 additions & 0 deletions shiftleft-utils/common.py
@@ -76,6 +76,50 @@ def get_all_apps(org_id):
)
return None

def get_all_users(org_id):
    """Return all the users for the given organization"""
    list_users_url = f"https://{config.SHIFTLEFT_API_HOST}/api/v4/orgs/{org_id}/rbac/users"
    r = requests.get(list_users_url, headers=headers)
    if r.ok:
        raw_response = r.json()
        if raw_response and raw_response.get("response"):
            users_list = raw_response.get("response")
            return users_list
    else:
        print(
            f"Unable to retrieve users list for the organization {org_id} due to {r.status_code} error"
        )
    return None

def get_all_teams(org_id):
"""Return all the teams for the given organization"""
list_teams_url = f"https://{config.SHIFTLEFT_API_HOST}/api/v4/orgs/{org_id}/rbac/teams"
r = requests.get(list_teams_url, headers=headers)
if r.ok:
raw_response = r.json()
if raw_response and raw_response.get("response"):
teams_list = raw_response.get("response")
return teams_list
else:
print(
f"Unable to retrieve teams list for the organization {org_id} due to {r.status_code} error"
)
return None

def get_team_members(org_id, team_id):
    """Return the details, including members, of the given team"""
    list_team_members_url = f"https://{config.SHIFTLEFT_API_HOST}/api/v4/orgs/{org_id}/rbac/teams/{team_id}"
    r = requests.get(list_team_members_url, headers=headers)
    if r.ok:
        raw_response = r.json()
        if raw_response and raw_response.get("response"):
            team_details = raw_response.get("response")
            return team_details
    else:
        print(
            f"Unable to retrieve the member list for team {team_id} in organization {org_id} due to {r.status_code} error"
        )
    return None

def get_all_findings(org_id, app_name, version):
"""Method to retrieve all findings"""
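For orientation, here is a minimal sketch (not part of the diff) of how the three new helpers can be combined to map projects to their owning team and team admins. The function name team_admins_by_project is hypothetical, and the response field names (projects, team_name, team_id, members, team_role_aliases, user_id_v2, id_v2, email) are taken from how stats.py consumes these endpoints below:

def team_admins_by_project(org_id):
    """Hypothetical helper: project name -> owning team and admin emails."""
    users = {u.get("id_v2"): u.get("email") for u in (get_all_users(org_id) or [])}
    mapping = {}
    for team in get_all_teams(org_id) or []:
        details = get_team_members(org_id, team.get("team_id")) or {}
        admins = [
            users.get(m.get("user_id_v2"))
            for m in details.get("members", [])
            if "TEAM_MANAGER" in (m.get("team_role_aliases") or [])
            or "TEAM_ADMIN" in (m.get("team_role_aliases") or [])
        ]
        for project in team.get("projects") or []:
            mapping[project] = {"team": team.get("team_name"), "admins": admins}
    return mapping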
5 changes: 4 additions & 1 deletion shiftleft-utils/export.py
@@ -399,6 +399,8 @@ def build_args():
dest="app_name",
help="App name",
default=config.SHIFTLEFT_APP,
action='append',
nargs='+',
)
parser.add_argument(
"-o",
@@ -438,7 +440,8 @@ def build_args():
args = build_args()
app_list = []
if args.app_name:
app_list.append({"id": args.app_name, "name": args.app_name})
for eachApp in args.app_name:
app_list.append({"id": eachApp[0], "name": eachApp[0]})
report_file = args.report_file
reports_dir = args.reports_dir
format = args.format
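Side note on the argparse change above: combining action="append" with nargs="+" makes args.app_name a list of lists (one inner list per use of the flag), which is why export.py indexes eachApp[0]. A small standalone illustration; the -a short flag is assumed here and may differ from the one export.py actually defines:

import argparse

parser = argparse.ArgumentParser()
# Each occurrence of the flag appends the list of values that followed it.
parser.add_argument("-a", dest="app_name", action="append", nargs="+")

args = parser.parse_args(["-a", "app1", "-a", "app2"])
print(args.app_name)  # [['app1'], ['app2']] -> eachApp[0] picks the app name
# Note: with action="append", any non-None default is appended to, so it should be a list.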
116 changes: 87 additions & 29 deletions shiftleft-utils/stats.py
@@ -23,6 +23,9 @@
get_findings_counts_url,
get_findings_url,
get_scan_run,
get_all_teams,
get_team_members,
get_all_users,
headers,
)

@@ -37,11 +40,12 @@ def to_arr(counts_dict):


def process_app(
-    progress, task, org_id, report_file, app, detailed, branch, include_run_info
+    progress, task, org_id, report_file, app, detailed, branch, include_run_info,
+    include_all_apps, teams_list, user_dict,
):
start = time.time()
app_id = app.get("id")
app_name = app.get("name")
isActive = True
# Stats only considers the first page for performance so the detailed report is based only on the latest 250 findings
# The various counts, however, are based on the full list of findings so are correct
findings_url = (
@@ -64,10 +68,12 @@
scan = response.get("scan")
# Scan will be None if there are any issues/errors
if not scan:
isActive = False
console.print(
f"""\nINFO: No scans found for {app_name} {branch if branch else ""}"""
)
-        return []
+        if not include_all_apps:
+            return []
run_info = {}
token_name = ""
if include_run_info:
@@ -81,15 +87,18 @@
)
tags = app.get("tags")
app_group = ""
-    app_branch = scan.get("tags", {}).get("branch", "")
+    app_branch = ""
+    if isActive:
+        app_branch = scan.get("tags", {}).get("branch", "")
if tags:
for tag in tags:
if tag.get("key") == "group":
app_group = tag.get("value")
break
-    # Other unused properties such as findings or counts
-    spid = scan.get("internal_id")
-    projectSpId = f'sl/{org_id}/{scan.get("app")}'
+    if isActive:
+        # Other unused properties such as findings or counts
+        spid = scan.get("internal_id")
+        projectSpId = f'sl/{org_id}/{scan.get("app")}'
counts = response.get("counts", [])
findings = response.get("findings", [])
vuln_counts = [
@@ -256,35 +265,64 @@
ml_assisted_count += vc["count"]
# Convert date time to BigQuery friendly format
completed_at = ""
-    try:
-        ctime = scan.get("completed_at", "")
-        completed_at_dt = datetime.strptime(
-            ctime,
-            "%Y-%m-%dT%H:%M:%S.%fZ %Z"
-            if "UTC" in ctime
-            else "%Y-%m-%dT%H:%M:%S.%fZ",
-        )
-        completed_at = completed_at_dt.strftime("%Y-%m-%d %H:%M:%S.%f")
-    except Exception as e:
-        completed_at = (
-            scan.get("completed_at", "")
-            .replace(" UTC", "")
-            .replace("Z", "")
-            .replace("T", " ")
-        )
+    if isActive:
+        try:
+            ctime = scan.get("completed_at", "")
+            completed_at_dt = datetime.strptime(
+                ctime,
+                "%Y-%m-%dT%H:%M:%S.%fZ %Z"
+                if "UTC" in ctime
+                else "%Y-%m-%dT%H:%M:%S.%fZ",
+            )
+            completed_at = completed_at_dt.strftime("%Y-%m-%d %H:%M:%S.%f")
+        except Exception as e:
+            completed_at = (
+                scan.get("completed_at", "")
+                .replace(" UTC", "")
+                .replace("Z", "")
+                .replace("T", " ")
+            )
progress.update(
task,
description=f"""Processed [bold]{app.get("name")}[/bold] in {math.ceil(time.time() - start)} seconds""",
)
if isActive:
appName = scan.get("app")
appVersion = scan.get("version")
scanID = scan.get("id")
scanLang = scan.get("language")
scanExp = scan.get("number_of_expressions")
else:
appName = app_name
appVersion = ""
scanID = ""
scanLang = ""
scanExp = ""
appTeam = ""
teamAdmins = ""
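    # Find the team whose projects include this app, then collect the emails of
    # members with a TEAM_MANAGER or TEAM_ADMIN role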
    for eachTeam in teams_list:
        if eachTeam.get("projects"):
            if appName in eachTeam.get("projects"):
                appTeam = eachTeam.get("team_name")
                teamDetails = get_team_members(org_id, eachTeam.get("team_id"))
                teamMembers = teamDetails.get("members") if teamDetails else None
                if teamMembers:
                    for eachMember in teamMembers:
                        memberRoleinTeam = eachMember.get("team_role_aliases") or []
                        if "TEAM_MANAGER" in memberRoleinTeam or "TEAM_ADMIN" in memberRoleinTeam:
                            adminEmail = user_dict.get(eachMember.get("user_id_v2"))
                            if adminEmail:
                                teamAdmins = f"{adminEmail}, {teamAdmins}"

return [
scan.get("app"),
appName,
app_group,
appTeam,
teamAdmins.rstrip(', '),
isActive,
app_branch,
scan.get("version"),
appVersion,
completed_at,
scan.get("id"),
scan.get("language"),
scan.get("number_of_expressions"),
scanID,
scanLang,
scanExp,
ml_assisted_count,
critical_count,
high_count,
@@ -325,6 +363,9 @@ def write_to_csv(report_file, row):
csv_cols = [
"App",
"App Group",
"Team Name",
"Team Admins",
"ActiveApp",
"Branch",
"Version",
"Last Scan",
@@ -369,9 +410,16 @@ def write_to_csv(report_file, row):
reportwriter.writerow(row)


-def collect_stats_parallel(org_id, report_file, detailed, branch, include_run_info):
+def collect_stats_parallel(org_id, report_file, detailed, branch, include_run_info, include_all_apps):
"""Method to collect stats for all apps to a csv"""
apps_list = get_all_apps(org_id)
    teams_list = get_all_teams(org_id) or []
    users_list = get_all_users(org_id) or []

user_dict = {}
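    # Map each user's id_v2 to their email so team admins can be reported by address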
for eachUser in users_list:
user_dict[eachUser.get("id_v2")] = eachUser.get("email")

if not apps_list:
console.print("No apps were found in this organization")
return
@@ -405,6 +453,9 @@ def collect_stats_parallel(org_id, report_file, detailed, branch, include_run_info, include_all_apps):
detailed,
branch,
include_run_info,
include_all_apps,
teams_list,
user_dict,
)
rows.append(row)
rows = dask.compute(*rows)
@@ -446,6 +497,13 @@ def build_args():
help="Run info includes runtime information, tokens and scan statistics",
default=False,
)
parser.add_argument(
"--include-all-apps",
action="store_true",
dest="include_app_apps",
help="Run info includes data for all apps including app placeholders",
default=False,
)
return parser.parse_args()


@@ -466,7 +524,7 @@ def main():
args = build_args()
report_file = args.report_file
collect_stats_parallel(
-        org_id, report_file, args.detailed, args.branch, args.include_run_info
+        org_id, report_file, args.detailed, args.branch, args.include_run_info, args.include_all_apps
)


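Taken together, a hedged usage example: running something like

    python stats.py --include-all-apps

(the flag comes from this diff; organization credentials and the report file location are assumed to be configured the way the existing script already expects) adds the Team Name, Team Admins and ActiveApp columns to the CSV and keeps a row for every app, with ActiveApp set to False and the scan-specific fields left blank for apps that have never been scanned. Without the flag, unscanned apps are skipped as before.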