@@ -54,13 +54,6 @@ int StartPosition(SharedFunctionInfo info) {
   return start;
 }
 
-bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
-  int a_start = StartPosition(a);
-  int b_start = StartPosition(b);
-  if (a_start == b_start) return a.EndPosition() > b.EndPosition();
-  return a_start < b_start;
-}
-
 bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
   DCHECK_NE(kNoSourcePosition, a.start);
   DCHECK_NE(kNoSourcePosition, b.start);
@@ -481,32 +474,12 @@ void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
   // Reset all counters on the DebugInfo to zero.
   ResetAllBlockCounts(info);
 }
-}  // anonymous namespace
-
-std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
-  DCHECK(!isolate->is_best_effort_code_coverage());
-  std::unique_ptr<Coverage> result =
-      Collect(isolate, isolate->code_coverage_mode());
-  if (!isolate->is_collecting_type_profile() &&
-      (isolate->is_precise_binary_code_coverage() ||
-       isolate->is_block_binary_code_coverage())) {
-    // We do not have to hold onto feedback vectors for invocations we already
-    // reported. So we can reset the list.
-    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
-  }
-  return result;
-}
-
-std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
-  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
-}
-
-std::unique_ptr<Coverage> Coverage::Collect(
-    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
-  SharedToCounterMap counter_map;
-
+
+void CollectAndMaybeResetCounts(Isolate* isolate,
+                                SharedToCounterMap* counter_map,
+                                v8::debug::CoverageMode coverage_mode) {
   const bool reset_count =
-      collectionMode != v8::debug::CoverageMode::kBestEffort;
+      coverage_mode != v8::debug::CoverageMode::kBestEffort;
 
   switch (isolate->code_coverage_mode()) {
     case v8::debug::CoverageMode::kBlockBinary:
@@ -525,15 +498,15 @@ std::unique_ptr<Coverage> Coverage::Collect(
         DCHECK(shared.IsSubjectToDebugging());
         uint32_t count = static_cast<uint32_t>(vector.invocation_count());
         if (reset_count) vector.clear_invocation_count();
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
       }
       break;
     }
     case v8::debug::CoverageMode::kBestEffort: {
       DCHECK(!isolate->factory()
                   ->feedback_vectors_for_profiling_tools()
                   ->IsArrayList());
-      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, collectionMode);
+      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
       HeapIterator heap_iterator(isolate->heap());
       for (HeapObject current_obj = heap_iterator.next();
            !current_obj.is_null(); current_obj = heap_iterator.next()) {
@@ -542,8 +515,9 @@ std::unique_ptr<Coverage> Coverage::Collect(
         SharedFunctionInfo shared = func.shared();
         if (!shared.IsSubjectToDebugging()) continue;
         if (!(func.has_feedback_vector() ||
-              func.has_closure_feedback_cell_array()))
+              func.has_closure_feedback_cell_array())) {
           continue;
+        }
         uint32_t count = 0;
         if (func.has_feedback_vector()) {
           count =
@@ -554,7 +528,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
           // at least once. We don't have precise invocation count here.
           count = 1;
         }
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
       }
 
       // Also check functions on the stack to collect the count map. With lazy
@@ -563,12 +537,64 @@ std::unique_ptr<Coverage> Coverage::Collect(
       // updated (i.e. it didn't execute return / jump).
       for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
         SharedFunctionInfo shared = it.frame()->function().shared();
-        if (counter_map.Get(shared) != 0) continue;
-        counter_map.Add(shared, 1);
+        if (counter_map->Get(shared) != 0) continue;
+        counter_map->Add(shared, 1);
       }
       break;
     }
   }
+}
+
+// A {SFI, count} tuple is used to sort by source range (stored on
+// the SFI) and call count (in the counter map).
+struct SharedFunctionInfoAndCount {
+  SharedFunctionInfoAndCount(SharedFunctionInfo info, uint32_t count)
+      : info(info),
+        count(count),
+        start(StartPosition(info)),
+        end(info.EndPosition()) {}
+
+  // Sort by:
+  // - start, ascending.
+  // - end, descending.
+  // - count, ascending.
+  bool operator<(const SharedFunctionInfoAndCount& that) const {
+    if (this->start != that.start) return this->start < that.start;
+    if (this->end != that.end) return this->end > that.end;
+    return this->count < that.count;
+  }
+
+  SharedFunctionInfo info;
+  uint32_t count;
+  int start;
+  int end;
+};
+
+}  // anonymous namespace
+
+std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
+  DCHECK(!isolate->is_best_effort_code_coverage());
+  std::unique_ptr<Coverage> result =
+      Collect(isolate, isolate->code_coverage_mode());
+  if (!isolate->is_collecting_type_profile() &&
+      (isolate->is_precise_binary_code_coverage() ||
+       isolate->is_block_binary_code_coverage())) {
+    // We do not have to hold onto feedback vectors for invocations we already
+    // reported. So we can reset the list.
+    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
+  }
+  return result;
+}
+
+std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
+  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
+}
+
+std::unique_ptr<Coverage> Coverage::Collect(
+    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
+  // Collect call counts for all functions.
+  SharedToCounterMap counter_map;
+  CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);
 
   // Iterate shared function infos of every script and build a mapping
   // between source ranges and invocation counts.
@@ -583,30 +609,40 @@ std::unique_ptr<Coverage> Coverage::Collect(
     result->emplace_back(script_handle);
     std::vector<CoverageFunction>* functions = &result->back().functions;
 
-    std::vector<SharedFunctionInfo> sorted;
+    std::vector<SharedFunctionInfoAndCount> sorted;
 
     {
       // Sort functions by start position, from outer to inner functions.
       SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
       for (SharedFunctionInfo info = infos.Next(); !info.is_null();
            info = infos.Next()) {
-        sorted.push_back(info);
+        sorted.emplace_back(info, counter_map.Get(info));
      }
-      std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
+      std::sort(sorted.begin(), sorted.end());
     }
 
     // Stack to track nested functions, referring function by index.
     std::vector<size_t> nesting;
 
     // Use sorted list to reconstruct function nesting.
-    for (SharedFunctionInfo info : sorted) {
-      int start = StartPosition(info);
-      int end = info.EndPosition();
-      uint32_t count = counter_map.Get(info);
+    for (const SharedFunctionInfoAndCount& v : sorted) {
+      SharedFunctionInfo info = v.info;
+      int start = v.start;
+      int end = v.end;
+      uint32_t count = v.count;
+
       // Find the correct outer function based on start position.
+      //
+      // This is not robust when considering two functions with identical source
+      // ranges. In this case, it is unclear which function is the inner / outer
+      // function. Above, we ensure that such functions are sorted in ascending
+      // `count` order, so at least our `parent_is_covered` optimization below
+      // should be fine.
+      // TODO(jgruber): Consider removing the optimization.
       while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
         nesting.pop_back();
       }
+
      if (count != 0) {
        switch (collectionMode) {
          case v8::debug::CoverageMode::kBlockCount:
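The heart of this change is the new sort key and the nesting reconstruction in Coverage::Collect: functions are ordered by start position ascending, end position descending, and invocation count ascending, then a stack of indices rebuilds the outer/inner relationship. The standalone sketch below illustrates that behaviour outside of V8. FunctionAndCount, the sample source ranges, and main() are hypothetical stand-ins invented for this example, not code from the patch, and unlike the real loop it pushes every function onto the stack to stay short.

// Minimal sketch (not V8 code): mirrors the comparator and the
// index-stack nesting loop from the patch.
// Build: g++ -std=c++17 nesting_sketch.cc -o nesting_sketch
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Stand-in for SharedFunctionInfoAndCount: a function's source range plus
// its invocation count. The `name` field exists only for printing.
struct FunctionAndCount {
  std::string name;
  int start;
  int end;
  uint32_t count;

  // Same ordering as the patch:
  // - start, ascending.
  // - end, descending.
  // - count, ascending.
  // Outer functions therefore sort before the inner functions they enclose.
  bool operator<(const FunctionAndCount& that) const {
    if (start != that.start) return start < that.start;
    if (end != that.end) return end > that.end;
    return count < that.count;
  }
};

int main() {
  // Hypothetical script: `outer` spans [0, 100) and encloses `inner` at
  // [10, 40); `inner_dup` shares inner's exact source range but was never
  // invoked, modelling the identical-range case discussed in the patch.
  std::vector<FunctionAndCount> sorted = {
      {"inner", 10, 40, 3},
      {"outer", 0, 100, 1},
      {"inner_dup", 10, 40, 0},
  };
  std::sort(sorted.begin(), sorted.end());

  // Reconstruct nesting as in Coverage::Collect: keep a stack of indices
  // and pop entries whose end position lies at or before the current
  // function's start position.
  std::vector<size_t> nesting;
  for (size_t i = 0; i < sorted.size(); i++) {
    const FunctionAndCount& v = sorted[i];
    while (!nesting.empty() && sorted[nesting.back()].end <= v.start) {
      nesting.pop_back();
    }
    const std::string parent =
        nesting.empty() ? "<top level>" : sorted[nesting.back()].name;
    std::cout << v.name << " (count " << v.count << ") nested in " << parent
              << "\n";
    nesting.push_back(i);
  }
  return 0;
}

Note how the two entries sharing the range [10, 40) are ordered by ascending count, so the covered duplicate is treated as the innermost function; that is the property the new in-code comment about the `parent_is_covered` optimization relies on.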