@@ -100,12 +100,17 @@ def _should_use_scan_service(scan_type: str, scan_parameters: dict) -> bool:
     return scan_type == consts.SECRET_SCAN_TYPE and scan_parameters.get('report') is True
 
 
-def _should_use_sync_flow(scan_type: str, sync_option: bool, scan_parameters: Optional[dict] = None) -> bool:
+def _should_use_sync_flow(
+    command_scan_type: str, scan_type: str, sync_option: bool, scan_parameters: Optional[dict] = None
+) -> bool:
     if not sync_option:
         return False
 
-    if scan_type not in (consts.SCA_SCAN_TYPE,):
-        raise ValueError(f'Sync scan is not available for {scan_type}')
+    if command_scan_type not in {'path', 'repository'}:
+        raise ValueError(f'Sync flow is not available for "{command_scan_type}"')
+
+    if scan_type == consts.SAST_SCAN_TYPE:
+        raise ValueError('Sync scan is not available for SAST scan type.')
 
     if scan_parameters.get('report') is True:
         raise ValueError('You can not use sync flow with report option. Either remove "report" or "sync" option.')
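
A quick illustration (not part of the commit) of how the reworked guard is expected to behave; the 'commit_history' command name is an assumption used only for this sketch:

# illustrative calls; behavior inferred from the hunk above
_should_use_sync_flow('path', consts.SECRET_SCAN_TYPE, sync_option=False)            # -> False, --sync was not passed
_should_use_sync_flow('commit_history', consts.SECRET_SCAN_TYPE, sync_option=True)   # ValueError: only 'path' and 'repository' commands
_should_use_sync_flow('path', consts.SAST_SCAN_TYPE, sync_option=True)               # ValueError: SAST is not supported
_should_use_sync_flow('path', consts.SECRET_SCAN_TYPE, True, {'report': True})       # ValueError: 'report' conflicts with 'sync'
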
@@ -163,7 +168,7 @@ def _scan_batch_thread_func(batch: List[Document]) -> Tuple[str, CliError, Local
         scan_completed = False
 
         should_use_scan_service = _should_use_scan_service(scan_type, scan_parameters)
-        should_use_sync_flow = _should_use_sync_flow(scan_type, sync_option, scan_parameters)
+        should_use_sync_flow = _should_use_sync_flow(command_scan_type, scan_type, sync_option, scan_parameters)
 
         try:
             logger.debug('Preparing local files, %s', {'batch_size': len(batch)})
@@ -217,7 +222,7 @@ def _scan_batch_thread_func(batch: List[Document]) -> Tuple[str, CliError, Local
             zip_file_size,
             command_scan_type,
             error_message,
-            should_use_scan_service,
+            should_use_scan_service or should_use_sync_flow,  # sync flow implies scan service
         )
 
         return scan_id, error, local_scan_result
@@ -359,6 +364,8 @@ def scan_commit_range_documents(
     scan_parameters: Optional[dict] = None,
     timeout: Optional[int] = None,
 ) -> None:
+    """Used by SCA only"""
+
     cycode_client = context.obj['client']
     scan_type = context.obj['scan_type']
     severity_threshold = context.obj['severity_threshold']
@@ -484,7 +491,8 @@ def perform_scan(
     should_use_sync_flow: bool = False,
 ) -> ZippedFileScanResult:
     if should_use_sync_flow:
-        return perform_scan_sync(cycode_client, zipped_documents, scan_type, scan_parameters)
+        # sync flow does not support commit range scans; _should_use_sync_flow already rules them out
+        return perform_scan_sync(cycode_client, zipped_documents, scan_type, scan_parameters, is_git_diff)
 
     if scan_type in (consts.SCA_SCAN_TYPE, consts.SAST_SCAN_TYPE) or should_use_scan_service:
         return perform_scan_async(cycode_client, zipped_documents, scan_type, scan_parameters, is_commit_range)
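
For orientation, a minimal sketch of the routing order perform_scan now follows; the helper name is hypothetical and the final fallback branch is outside this diff:

# illustrative only; mirrors the if-chain in the hunk above
def _route(should_use_sync_flow: bool, scan_type: str, should_use_scan_service: bool) -> str:
    if should_use_sync_flow:
        return 'perform_scan_sync'  # now also receives is_git_diff
    if scan_type in (consts.SCA_SCAN_TYPE, consts.SAST_SCAN_TYPE) or should_use_scan_service:
        return 'perform_scan_async'
    return 'polling flow (not shown in this diff)'
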
@@ -520,12 +528,13 @@ def perform_scan_sync(
     zipped_documents: 'InMemoryZip',
     scan_type: str,
     scan_parameters: dict,
+    is_git_diff: bool = False,
 ) -> ZippedFileScanResult:
-    scan_results = cycode_client.zipped_file_scan_sync(zipped_documents, scan_type, scan_parameters)
+    scan_results = cycode_client.zipped_file_scan_sync(zipped_documents, scan_type, scan_parameters, is_git_diff)
     logger.debug('Sync scan request has been triggered successfully, %s', {'scan_id': scan_results.id})
     return ZippedFileScanResult(
         did_detect=True,
-        detections_per_file=_map_detections_per_file_and_commit_id(scan_results.detection_messages),
+        detections_per_file=_map_detections_per_file_and_commit_id(scan_type, scan_results.detection_messages),
         scan_id=scan_results.id,
     )
 
@@ -610,7 +619,7 @@ def get_document_detections(
         commit_id = detections_per_file.commit_id
 
         logger.debug(
-            'Going to find the document of the violated file. , %s', {'file_name': file_name, 'commit_id': commit_id}
+            'Going to find the document of the violated file, %s', {'file_name': file_name, 'commit_id': commit_id}
         )
 
         document = _get_document_by_file_name(documents_to_scan, file_name, commit_id)
@@ -874,7 +883,7 @@ def _get_scan_result(
 
     return ZippedFileScanResult(
         did_detect=True,
-        detections_per_file=_map_detections_per_file_and_commit_id(scan_raw_detections),
+        detections_per_file=_map_detections_per_file_and_commit_id(scan_type, scan_raw_detections),
         scan_id=scan_id,
         report_url=_try_get_report_url_if_needed(cycode_client, should_get_report, scan_id, scan_type),
     )
@@ -904,7 +913,7 @@ def _try_get_report_url_if_needed(
         logger.debug('Failed to get report URL', exc_info=e)
 
 
-def _map_detections_per_file_and_commit_id(raw_detections: List[dict]) -> List[DetectionsPerFile]:
+def _map_detections_per_file_and_commit_id(scan_type: str, raw_detections: List[dict]) -> List[DetectionsPerFile]:
     """Converts list of detections (async flow) to list of DetectionsPerFile objects (sync flow).
 
     Args:
@@ -923,7 +932,7 @@ def _map_detections_per_file_and_commit_id(raw_detections: List[dict]) -> List[D
             # FIXME(MarshalX): investigate this field mapping
             raw_detection['message'] = raw_detection['correlation_message']
 
-            file_name = _get_file_name_from_detection(raw_detection)
+            file_name = _get_file_name_from_detection(scan_type, raw_detection)
             detection: Detection = DetectionSchema().load(raw_detection)
             commit_id: Optional[str] = detection.detection_details.get('commit_id')  # could be None
             group_by_key = (file_name, commit_id)
@@ -942,12 +951,10 @@ def _map_detections_per_file_and_commit_id(raw_detections: List[dict]) -> List[D
     ]
 
 
-def _get_file_name_from_detection(raw_detection: dict) -> str:
-    category = raw_detection.get('category')
-
-    if category == 'SAST':
+def _get_file_name_from_detection(scan_type: str, raw_detection: dict) -> str:
+    if scan_type == consts.SAST_SCAN_TYPE:
         return raw_detection['detection_details']['file_path']
-    if category == 'SecretDetection':
+    if scan_type == consts.SECRET_SCAN_TYPE:
         return _get_secret_file_name_from_detection(raw_detection)
 
     return raw_detection['detection_details']['file_name']
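
A small, hypothetical example (values invented for illustration) of the new scan-type-based dispatch; previously the function branched on raw_detection.get('category'), now the caller's scan_type decides:

# only the keys read by _get_file_name_from_detection matter here
raw_detection = {
    'detection_details': {
        'file_path': 'src/app/main.py',    # returned when scan_type == consts.SAST_SCAN_TYPE
        'file_name': 'requirements.txt',   # fallback for other scan types, e.g. SCA
    },
}

_get_file_name_from_detection(consts.SAST_SCAN_TYPE, raw_detection)  # -> 'src/app/main.py'
_get_file_name_from_detection(consts.SCA_SCAN_TYPE, raw_detection)   # -> 'requirements.txt'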