22 changes: 11 additions & 11 deletions cycode/cli/commands/auth/auth_manager.py
@@ -29,20 +29,20 @@ def __init__(self) -> None:
self.auth_client = AuthClient()

def authenticate(self) -> None:
- logger.debug('generating pkce code pair')
+ logger.debug('Generating PKCE code pair')
code_challenge, code_verifier = self._generate_pkce_code_pair()

- logger.debug('starting authentication session')
+ logger.debug('Starting authentication session')
session_id = self.start_session(code_challenge)
- logger.debug('authentication session created, %s', {'session_id': session_id})
+ logger.debug('Authentication session created, %s', {'session_id': session_id})

- logger.debug('opening browser and redirecting to cycode login page')
+ logger.debug('Opening browser and redirecting to Cycode login page')
self.redirect_to_login_page(code_challenge, session_id)

- logger.debug('starting get api token process')
+ logger.debug('Getting API token')
api_token = self.get_api_token(session_id, code_verifier)

- logger.debug('saving get api token')
+ logger.debug('Saving API token')
self.save_api_token(api_token)

def start_session(self, code_challenge: str) -> str:
@@ -56,20 +56,20 @@ def redirect_to_login_page(self, code_challenge: str, session_id: str) -> None:
def get_api_token(self, session_id: str, code_verifier: str) -> 'ApiToken':
api_token = self.get_api_token_polling(session_id, code_verifier)
if api_token is None:
- raise AuthProcessError('getting api token is completed, but the token is missing')
+ raise AuthProcessError('API token polling is completed, but the token is missing')
return api_token

def get_api_token_polling(self, session_id: str, code_verifier: str) -> 'ApiToken':
end_polling_time = time.time() + self.POLLING_TIMEOUT_IN_SECONDS
while time.time() < end_polling_time:
- logger.debug('trying to get api token...')
+ logger.debug('Trying to get API token...')
api_token_polling_response = self.auth_client.get_api_token(session_id, code_verifier)
if self._is_api_token_process_completed(api_token_polling_response):
- logger.debug('get api token process completed')
+ logger.debug('Got API token process completion response')
return api_token_polling_response.api_token
if self._is_api_token_process_failed(api_token_polling_response):
- logger.debug('get api token process failed')
- raise AuthProcessError('error during getting api token')
+ logger.debug('Got API token process failure response')
+ raise AuthProcessError('Error while obtaining API token')
time.sleep(self.POLLING_WAIT_INTERVAL_IN_SECONDS)

raise AuthProcessError('session expired')
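The authenticate() flow above logs each step of a PKCE-style login: generate the code pair, start a session, redirect to the browser, then poll for the API token until it arrives or the session expires. A minimal, self-contained sketch of that polling pattern follows; the function and parameter names (fetch_once, timeout_seconds, wait_interval_seconds) are placeholders for illustration, not the CLI's real API.

import time


class AuthProcessError(Exception):
    """Raised when the login flow cannot produce an API token."""


def poll_for_api_token(fetch_once, timeout_seconds=180, wait_interval_seconds=3):
    # Poll fetch_once() until it reports completion or failure, or the deadline passes.
    # fetch_once is a hypothetical stand-in for AuthClient.get_api_token(session_id, code_verifier)
    # and is assumed to return a (status, token) pair.
    deadline = time.time() + timeout_seconds
    while time.time() < deadline:
        status, token = fetch_once()
        if status == 'completed':
            if token is None:
                # Mirrors the guard in get_api_token(): completion without a token is still an error.
                raise AuthProcessError('API token polling is completed, but the token is missing')
            return token
        if status == 'failed':
            raise AuthProcessError('Error while obtaining API token')
        time.sleep(wait_interval_seconds)
    raise AuthProcessError('session expired')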
2 changes: 1 addition & 1 deletion cycode/cli/commands/report/sbom/common.py
@@ -70,7 +70,7 @@ def send_report_feedback(

client.report_status(report_execution_id, scan_status)
except Exception as e:
- logger.debug(f'Failed to send report feedback: {e}')
+ logger.debug('Failed to send report feedback', exc_info=e)


def create_sbom_report(
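This change drops string interpolation of the exception in favor of logging's built-in exc_info handling, which records the full traceback rather than only str(e). A short sketch of the pattern, assuming nothing beyond the standard logging module (the logger name and the failing call are made up for illustration):

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('example')


def report_status_or_log():
    try:
        raise RuntimeError('service unavailable')  # hypothetical stand-in for client.report_status(...)
    except Exception as e:
        # Passing the exception via exc_info attaches its traceback to the log record.
        logger.debug('Failed to send report feedback', exc_info=e)


report_status_or_log()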
66 changes: 37 additions & 29 deletions cycode/cli/commands/scan/code_scanner.py
@@ -200,7 +200,7 @@ def _scan_batch_thread_func(batch: List[Document]) -> Tuple[str, CliError, Local
scan_id = local_scan_result.scan_id

logger.debug(
- 'Finished scan process, %s',
+ 'Processing scan results, %s',
{
'all_violations_count': detections_count,
'relevant_violations_count': relevant_detections_count,
@@ -246,14 +246,14 @@ def scan_commit_range(

repo = Repo(path)
total_commits_count = int(repo.git.rev_list('--count', commit_range))
- logger.debug(f'Calculating diffs for {total_commits_count} commits in the commit range {commit_range}')
+ logger.debug('Calculating diffs for %s commits in the commit range %s', total_commits_count, commit_range)

progress_bar.set_section_length(ScanProgressBarSection.PREPARE_LOCAL_FILES, total_commits_count)

scanned_commits_count = 0
for commit in repo.iter_commits(rev=commit_range):
if _does_reach_to_max_commits_to_scan_limit(commit_ids_to_scan, max_commits_count):
- logger.debug(f'Reached to max commits to scan count. Going to scan only {max_commits_count} last commits')
+ logger.debug('Reached to max commits to scan count. Going to scan only %s last commits', max_commits_count)
progress_bar.update(ScanProgressBarSection.PREPARE_LOCAL_FILES, total_commits_count - scanned_commits_count)
break

@@ -284,7 +284,7 @@ def scan_commit_range(
scanned_commits_count += 1

logger.debug('List of commit ids to scan, %s', {'commit_ids': commit_ids_to_scan})
- logger.debug('Starting to scan commit range (It may take a few minutes)')
+ logger.debug('Starting to scan commit range (it may take a few minutes)')

scan_documents(context, documents_to_scan, is_git_diff=True, is_commit_range=True)
return None
@@ -307,7 +307,8 @@ def scan_documents(
ConsolePrinter(context).print_error(
CliError(
code='no_relevant_files',
- message='Error: The scan could not be completed - relevant files to scan are not found.',
+ message='Error: The scan could not be completed - relevant files to scan are not found. '
+ 'Enable verbose mode to see more details.',
)
)
return
@@ -392,7 +393,7 @@ def scan_commit_range_documents(
scan_id = local_scan_result.scan_id

logger.debug(
- 'Finished scan process, %s',
+ 'Processing commit range scan results, %s',
{
'all_violations_count': detections_count,
'relevant_violations_count': relevant_detections_count,
@@ -475,7 +476,7 @@ def perform_scan_async(
scan_parameters: dict,
) -> ZippedFileScanResult:
scan_async_result = cycode_client.zipped_file_scan_async(zipped_documents, scan_type, scan_parameters)
- logger.debug('scan request has been triggered successfully, scan id: %s', scan_async_result.scan_id)
+ logger.debug('Async scan request has been triggered successfully, %s', {'scan_id': scan_async_result.scan_id})

return poll_scan_results(
cycode_client,
@@ -492,7 +493,7 @@ def perform_scan_sync(
scan_parameters: dict,
) -> ZippedFileScanResult:
scan_results = cycode_client.zipped_file_scan_sync(zipped_documents, scan_type, scan_parameters)
- logger.debug('scan request has been triggered successfully, scan id: %s', scan_results.id)
+ logger.debug('Sync scan request has been triggered successfully, %s', {'scan_id': scan_results.id})
return ZippedFileScanResult(
did_detect=True,
detections_per_file=_map_detections_per_file(scan_results.detection_messages),
@@ -512,7 +513,9 @@ def perform_commit_range_scan_async(
from_commit_zipped_documents, to_commit_zipped_documents, scan_type, scan_parameters
)

- logger.debug('scan request has been triggered successfully, scan id: %s', scan_async_result.scan_id)
+ logger.debug(
+ 'Async commit range scan request has been triggered successfully, %s', {'scan_id': scan_async_result.scan_id}
+ )
return poll_scan_results(
cycode_client, scan_async_result.scan_id, scan_type, scan_parameters.get('report'), timeout
)
@@ -552,11 +555,12 @@ def poll_scan_results(


def print_debug_scan_details(scan_details_response: 'ScanDetailsResponse') -> None:
- logger.debug(f'Scan update: (scan_id: {scan_details_response.id})')
- logger.debug(f'Scan status: {scan_details_response.scan_status}')
+ logger.debug(
+ 'Scan update, %s', {'scan_id': scan_details_response.id, 'scan_status': scan_details_response.scan_status}
+ )

if scan_details_response.message:
- logger.debug(f'Scan message: {scan_details_response.message}')
+ logger.debug('Scan message: %s', scan_details_response.message)


def print_results(
@@ -569,14 +573,16 @@ def print_results(
def get_document_detections(
scan_result: ZippedFileScanResult, documents_to_scan: List[Document]
) -> List[DocumentDetections]:
- logger.debug('Get document detections')
+ logger.debug('Getting document detections')

document_detections = []
for detections_per_file in scan_result.detections_per_file:
file_name = get_path_by_os(detections_per_file.file_name)
commit_id = detections_per_file.commit_id

- logger.debug('Going to find document of violated file, %s', {'file_name': file_name, 'commit_id': commit_id})
+ logger.debug(
+ 'Going to find the document of the violated file, %s', {'file_name': file_name, 'commit_id': commit_id}
+ )

document = _get_document_by_file_name(documents_to_scan, file_name, commit_id)
document_detections.append(DocumentDetections(document=document, detections=detections_per_file.detections))
@@ -659,10 +665,10 @@ def get_scan_parameters(context: click.Context, paths: Tuple[str]) -> dict:
def try_get_git_remote_url(path: str) -> Optional[str]:
try:
remote_url = Repo(path).remotes[0].config_reader.get('url')
- logger.debug(f'Found Git remote URL "{remote_url}" in path "{path}"')
+ logger.debug('Found Git remote URL, %s', {'remote_url': remote_url, 'path': path})
return remote_url
except Exception as e:
- logger.debug('Failed to get git remote URL. %s', {'exception_message': str(e)})
+ logger.debug('Failed to get Git remote URL', exc_info=e)
return None


@@ -719,15 +725,15 @@ def _should_exclude_detection(detection: Detection, exclusions: Dict) -> bool:
exclusions_by_value = exclusions.get(consts.EXCLUSIONS_BY_VALUE_SECTION_NAME, [])
if _is_detection_sha_configured_in_exclusions(detection, exclusions_by_value):
logger.debug(
- 'Going to ignore violations because is in the values to ignore list, %s',
- {'sha': detection.detection_details.get('sha512', '')},
+ 'Going to ignore violations because they are on the values-to-ignore list, %s',
+ {'value_sha': detection.detection_details.get('sha512', '')},
)
return True

exclusions_by_sha = exclusions.get(consts.EXCLUSIONS_BY_SHA_SECTION_NAME, [])
if _is_detection_sha_configured_in_exclusions(detection, exclusions_by_sha):
logger.debug(
- 'Going to ignore violations because is in the shas to ignore list, %s',
+ 'Going to ignore violations because they are on the SHA ignore list, %s',
{'sha': detection.detection_details.get('sha512', '')},
)
return True
@@ -737,7 +743,7 @@ def _should_exclude_detection(detection: Detection, exclusions: Dict) -> bool:
detection_rule = detection.detection_rule_id
if detection_rule in exclusions_by_rule:
logger.debug(
- 'Going to ignore violations because is in the shas to ignore list, %s',
+ 'Going to ignore violations because they are on the Rule ID ignore list, %s',
{'detection_rule': detection_rule},
)
return True
@@ -747,7 +753,7 @@ def _should_exclude_detection(detection: Detection, exclusions: Dict) -> bool:
package = _get_package_name(detection)
if package in exclusions_by_package:
logger.debug(
- 'Going to ignore violations because is in the packages to ignore list, %s', {'package': package}
+ 'Going to ignore violations because they are on the packages-to-ignore list, %s', {'package': package}
)
return True

@@ -810,7 +816,7 @@ def _report_scan_status(

cycode_client.report_scan_status(scan_type, scan_id, scan_status, should_use_scan_service)
except Exception as e:
- logger.debug('Failed to report scan status, %s', {'exception_message': str(e)})
+ logger.debug('Failed to report scan status', exc_info=e)


def _generate_unique_id() -> UUID:
Expand Down Expand Up @@ -868,7 +874,7 @@ def _try_get_report_url_if_needed(
report_url_response = cycode_client.get_scan_report_url(scan_id, scan_type)
return report_url_response.report_url
except Exception as e:
- logger.debug('Failed to get report url: %s', str(e))
+ logger.debug('Failed to get report URL', exc_info=e)


def wait_for_detections_creation(
@@ -883,16 +889,18 @@ def wait_for_detections_creation(
while time.time() < end_polling_time:
scan_persisted_detections_count = cycode_client.get_scan_detections_count(scan_type, scan_id)
logger.debug(
- f'Excepted {expected_detections_count} detections, got {scan_persisted_detections_count} detections '
- f'({expected_detections_count - scan_persisted_detections_count} more; '
- f'{round(end_polling_time - time.time())} seconds left)'
+ 'Expecting %s detections, got %s detections (%s more; %s seconds left)',
+ expected_detections_count,
+ scan_persisted_detections_count,
+ expected_detections_count - scan_persisted_detections_count,
+ round(end_polling_time - time.time()),
)
if scan_persisted_detections_count == expected_detections_count:
return

time.sleep(consts.DETECTIONS_COUNT_VERIFICATION_WAIT_INTERVAL_IN_SECONDS)

- logger.debug(f'{scan_persisted_detections_count} detections has been created')
+ logger.debug('%s detections have been created', scan_persisted_detections_count)
raise custom_exceptions.ScanAsyncError(
f'Failed to wait for detections to be created after {polling_timeout} seconds'
)
@@ -905,14 +913,14 @@ def _map_detections_per_file(detections: List[dict]) -> List[DetectionsPerFile]:
detection['message'] = detection['correlation_message']
file_name = _get_file_name_from_detection(detection)
if file_name is None:
- logger.debug('file name is missing from detection with id %s', detection.get('id'))
+ logger.debug('File name is missing from detection with ID %s', detection.get('id'))
continue
if detections_per_files.get(file_name) is None:
detections_per_files[file_name] = [DetectionSchema().load(detection)]
else:
detections_per_files[file_name].append(DetectionSchema().load(detection))
except Exception as e:
- logger.debug('Failed to parse detection: %s', str(e))
+ logger.debug('Failed to parse detection', exc_info=e)
continue

return [
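Across this file the pattern is the same: f-strings are replaced with logging's lazy %-formatting, usually with a single dict payload, so the message is only rendered when DEBUG records are actually emitted. A tiny sketch of that convention, assuming nothing beyond the standard logging module (the logger name and values are illustrative, not the CLI's real ones):

import logging

logging.basicConfig(level=logging.INFO)  # DEBUG is deliberately disabled here
logger = logging.getLogger('example')

scan_id = 'abc123'
detections_count = 7

# With lazy %-formatting the dict argument is still built, but it is never formatted
# into the message because DEBUG records are filtered out; an f-string would have
# done the string formatting before logger.debug() was even called.
logger.debug('Async scan request has been triggered successfully, %s', {'scan_id': scan_id})
logger.debug('Processing scan results, %s', {'all_violations_count': detections_count})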
9 changes: 5 additions & 4 deletions cycode/cli/commands/scan/pre_receive/pre_receive_command.py
@@ -32,14 +32,14 @@ def pre_receive_command(context: click.Context, ignored_args: List[str]) -> None

if should_skip_pre_receive_scan():
logger.info(
- 'A scan has been skipped as per your request.'
- ' Please note that this may leave your system vulnerable to secrets that have not been detected'
+ 'A scan has been skipped as per your request. '
+ 'Please note that this may leave your system vulnerable to secrets that have not been detected.'
)
return

if is_verbose_mode_requested_in_pre_receive_scan():
enable_verbose_mode(context)
- logger.debug('Verbose mode enabled, all log levels will be displayed')
+ logger.debug('Verbose mode enabled: all log levels will be displayed.')

command_scan_type = context.info_name
timeout = configuration_manager.get_pre_receive_command_timeout(command_scan_type)
@@ -51,7 +51,8 @@ def pre_receive_command(context: click.Context, ignored_args: List[str]) -> None
commit_range = calculate_pre_receive_commit_range(branch_update_details)
if not commit_range:
logger.info(
- 'No new commits found for pushed branch, %s', {'branch_update_details': branch_update_details}
+ 'No new commits found for pushed branch, %s',
+ {'branch_update_details': branch_update_details},
)
return
