
Commit 7a4ae1a

Benjamin Coe authored and BridgeAR committed
deps: V8: cherry-pick 597f885
Original commit message:

    [coverage] Deterministically sort collected shared function infos

    Prior to this CL, collected shared function infos with identical source
    ranges were sorted non-deterministically during coverage collection.
    This led to non-deterministically incorrectly-reported coverage, due to
    an optimization that depended on the sort order later on.

    With this CL, we now sort shared function infos by the source range
    *and* call count.

    Bug: v8:6000,v8:9212
    Change-Id: If8bf900727591e71dbd0df621e472a4303f3a353
    Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1771776
    Reviewed-by: Yang Guo <yangguo@chromium.org>
    Commit-Queue: Jakob Gruber <jgruber@chromium.org>
    Cr-Commit-Position: refs/heads/master@{#63411}

Refs: v8/v8@597f885
PR-URL: #29367
Reviewed-By: Rich Trott <rtrott@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Michael Dawson <michael_dawson@ca.ibm.com>
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
1 parent 8c776cb commit 7a4ae1a
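
To see the bug class this cherry-pick fixes, consider a minimal standalone sketch (not V8 code; FunctionRange and DeterministicLess are illustrative names): std::sort makes no guarantee about the relative order of elements that compare equal, so a comparator keyed only on the source range leaves functions with identical ranges in an unspecified order from run to run. Tie-breaking on call count, as the upstream CL does, makes the result deterministic:

// Sketch of the fix: add a final tie-breaker so no two distinct
// elements compare equal under std::sort.
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

struct FunctionRange {
  int start;       // source range start, sorted ascending
  int end;         // source range end, sorted descending (outer first)
  uint32_t count;  // call count, ascending tie-breaker
};

bool DeterministicLess(const FunctionRange& a, const FunctionRange& b) {
  if (a.start != b.start) return a.start < b.start;
  if (a.end != b.end) return a.end > b.end;
  return a.count < b.count;  // without this line, ties are unordered
}

int main() {
  // Two "functions" with identical source ranges but different counts.
  std::vector<FunctionRange> fns = {{0, 100, 5}, {0, 100, 0}, {10, 50, 1}};
  std::sort(fns.begin(), fns.end(), DeterministicLess);
  for (const FunctionRange& f : fns) {
    std::cout << f.start << "-" << f.end << ": " << f.count << "\n";
  }
  // Prints 0-100: 0, then 0-100: 5, then 10-50: 1 on every run.
}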

File tree

2 files changed (+81, -45 lines)


common.gypi

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@
 
     # Reset this number to 0 on major V8 upgrades.
     # Increment by one for each non-official patch applied to deps/v8.
-    'v8_embedder_string': '-node.15',
+    'v8_embedder_string': '-node.16',
 
     ##### V8 defaults for Node.js #####
 

deps/v8/src/debug/debug-coverage.cc

Lines changed: 80 additions & 44 deletions
@@ -54,13 +54,6 @@ int StartPosition(SharedFunctionInfo info) {
   return start;
 }
 
-bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
-  int a_start = StartPosition(a);
-  int b_start = StartPosition(b);
-  if (a_start == b_start) return a.EndPosition() > b.EndPosition();
-  return a_start < b_start;
-}
-
 bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
   DCHECK_NE(kNoSourcePosition, a.start);
   DCHECK_NE(kNoSourcePosition, b.start);
@@ -481,32 +474,12 @@ void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
   // Reset all counters on the DebugInfo to zero.
   ResetAllBlockCounts(info);
 }
-}  // anonymous namespace
-
-std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
-  DCHECK(!isolate->is_best_effort_code_coverage());
-  std::unique_ptr<Coverage> result =
-      Collect(isolate, isolate->code_coverage_mode());
-  if (!isolate->is_collecting_type_profile() &&
-      (isolate->is_precise_binary_code_coverage() ||
-       isolate->is_block_binary_code_coverage())) {
-    // We do not have to hold onto feedback vectors for invocations we already
-    // reported. So we can reset the list.
-    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
-  }
-  return result;
-}
-
-std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
-  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
-}
-
-std::unique_ptr<Coverage> Coverage::Collect(
-    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
-  SharedToCounterMap counter_map;
 
+void CollectAndMaybeResetCounts(Isolate* isolate,
+                                SharedToCounterMap* counter_map,
+                                v8::debug::CoverageMode coverage_mode) {
   const bool reset_count =
-      collectionMode != v8::debug::CoverageMode::kBestEffort;
+      coverage_mode != v8::debug::CoverageMode::kBestEffort;
 
   switch (isolate->code_coverage_mode()) {
     case v8::debug::CoverageMode::kBlockBinary:
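
This hunk extracts the counting loop out of Coverage::Collect into a free helper that mutates the map through a pointer, which in V8's style marks a mutable in/out parameter at the call site. A simplified sketch of the pattern (names and types simplified; CounterMap stands in for V8's SharedToCounterMap):

#include <cstdint>
#include <map>

using CounterMap = std::map<int, uint32_t>;  // stand-in for SharedToCounterMap

namespace {
// Helper takes the map by pointer rather than reference.
void CollectCounts(CounterMap* counter_map) {
  // ... walk feedback vectors and accumulate counts ...
  (*counter_map)[42] += 1;
}
}  // anonymous namespace

CounterMap Collect() {
  CounterMap counter_map;
  CollectCounts(&counter_map);  // the &map makes the mutation visible
  return counter_map;
}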
@@ -525,15 +498,15 @@ std::unique_ptr<Coverage> Coverage::Collect(
         DCHECK(shared.IsSubjectToDebugging());
         uint32_t count = static_cast<uint32_t>(vector.invocation_count());
         if (reset_count) vector.clear_invocation_count();
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
       }
       break;
     }
     case v8::debug::CoverageMode::kBestEffort: {
       DCHECK(!isolate->factory()
                   ->feedback_vectors_for_profiling_tools()
                   ->IsArrayList());
-      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, collectionMode);
+      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
       HeapIterator heap_iterator(isolate->heap());
       for (HeapObject current_obj = heap_iterator.next();
            !current_obj.is_null(); current_obj = heap_iterator.next()) {
@@ -542,8 +515,9 @@ std::unique_ptr<Coverage> Coverage::Collect(
         SharedFunctionInfo shared = func.shared();
         if (!shared.IsSubjectToDebugging()) continue;
         if (!(func.has_feedback_vector() ||
-              func.has_closure_feedback_cell_array()))
+              func.has_closure_feedback_cell_array())) {
           continue;
+        }
         uint32_t count = 0;
         if (func.has_feedback_vector()) {
           count =
@@ -554,7 +528,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
           // atleast once. We don't have precise invocation count here.
           count = 1;
         }
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
       }
 
       // Also check functions on the stack to collect the count map. With lazy
@@ -563,12 +537,64 @@ std::unique_ptr<Coverage> Coverage::Collect(
       // updated (i.e. it didn't execute return / jump).
       for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
         SharedFunctionInfo shared = it.frame()->function().shared();
-        if (counter_map.Get(shared) != 0) continue;
-        counter_map.Add(shared, 1);
+        if (counter_map->Get(shared) != 0) continue;
+        counter_map->Add(shared, 1);
       }
       break;
     }
   }
+}
+
+// A {SFI, count} tuple is used to sort by source range (stored on
+// the SFI) and call count (in the counter map).
+struct SharedFunctionInfoAndCount {
+  SharedFunctionInfoAndCount(SharedFunctionInfo info, uint32_t count)
+      : info(info),
+        count(count),
+        start(StartPosition(info)),
+        end(info.EndPosition()) {}
+
+  // Sort by:
+  // - start, ascending.
+  // - end, descending.
+  // - count, ascending.
+  bool operator<(const SharedFunctionInfoAndCount& that) const {
+    if (this->start != that.start) return this->start < that.start;
+    if (this->end != that.end) return this->end > that.end;
+    return this->count < that.count;
+  }
+
+  SharedFunctionInfo info;
+  uint32_t count;
+  int start;
+  int end;
+};
+
+}  // anonymous namespace
+
+std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
+  DCHECK(!isolate->is_best_effort_code_coverage());
+  std::unique_ptr<Coverage> result =
+      Collect(isolate, isolate->code_coverage_mode());
+  if (!isolate->is_collecting_type_profile() &&
+      (isolate->is_precise_binary_code_coverage() ||
+       isolate->is_block_binary_code_coverage())) {
+    // We do not have to hold onto feedback vectors for invocations we already
+    // reported. So we can reset the list.
+    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
+  }
+  return result;
+}
+
+std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
+  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
+}
+
+std::unique_ptr<Coverage> Coverage::Collect(
+    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
+  // Collect call counts for all functions.
+  SharedToCounterMap counter_map;
+  CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);
 
   // Iterate shared function infos of every script and build a mapping
   // between source ranges and invocation counts.
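
The new operator< above defines a strict weak ordering over the three-part key (start ascending, end descending, count ascending). For illustration only, the same ordering can be written with std::tie, where swapping the end operands between the two tuples flips that key's direction (SfiAndCountKey is a hypothetical stand-in, not V8 code):

#include <cstdint>
#include <tuple>

struct SfiAndCountKey {
  int start;
  int end;
  uint32_t count;

  // Equivalent to the hand-written comparator: start asc, end desc, count asc.
  bool operator<(const SfiAndCountKey& that) const {
    return std::tie(start, that.end, count) <
           std::tie(that.start, end, that.count);
  }
};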
@@ -583,30 +609,40 @@ std::unique_ptr<Coverage> Coverage::Collect(
     result->emplace_back(script_handle);
     std::vector<CoverageFunction>* functions = &result->back().functions;
 
-    std::vector<SharedFunctionInfo> sorted;
+    std::vector<SharedFunctionInfoAndCount> sorted;
 
     {
       // Sort functions by start position, from outer to inner functions.
       SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
       for (SharedFunctionInfo info = infos.Next(); !info.is_null();
            info = infos.Next()) {
-        sorted.push_back(info);
+        sorted.emplace_back(info, counter_map.Get(info));
       }
-      std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
+      std::sort(sorted.begin(), sorted.end());
     }
 
     // Stack to track nested functions, referring function by index.
     std::vector<size_t> nesting;
 
     // Use sorted list to reconstruct function nesting.
-    for (SharedFunctionInfo info : sorted) {
-      int start = StartPosition(info);
-      int end = info.EndPosition();
-      uint32_t count = counter_map.Get(info);
+    for (const SharedFunctionInfoAndCount& v : sorted) {
+      SharedFunctionInfo info = v.info;
+      int start = v.start;
+      int end = v.end;
+      uint32_t count = v.count;
+
       // Find the correct outer function based on start position.
+      //
+      // This is not robust when considering two functions with identical source
+      // ranges. In this case, it is unclear which function is the inner / outer
+      // function. Above, we ensure that such functions are sorted in ascending
+      // `count` order, so at least our `parent_is_covered` optimization below
+      // should be fine.
+      // TODO(jgruber): Consider removing the optimization.
       while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
         nesting.pop_back();
       }
+
       if (count != 0) {
         switch (collectionMode) {
           case v8::debug::CoverageMode::kBlockCount:
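
To see why the sort order matters for the nesting loop above, here is a standalone sketch (simplified; plain int ranges instead of SharedFunctionInfos) of the stack-based reconstruction: because ranges arrive sorted outer-first (start ascending, end descending), any range on the stack that ends at or before the next range's start can no longer be an enclosing function and is popped:

#include <cstddef>
#include <iostream>
#include <vector>

struct Range {
  int start;
  int end;
};

int main() {
  // Must already be sorted by {start asc, end desc}, as in Collect().
  std::vector<Range> sorted = {{0, 100}, {10, 40}, {20, 30}, {50, 90}};
  std::vector<std::size_t> nesting;  // indices of currently-open outer ranges
  for (std::size_t i = 0; i < sorted.size(); i++) {
    // Pop every range that ends before this one starts.
    while (!nesting.empty() && sorted[nesting.back()].end <= sorted[i].start) {
      nesting.pop_back();
    }
    std::cout << "range " << i << " depth " << nesting.size() << "\n";
    nesting.push_back(i);
  }
  // Depths: 0, 1, 2, 1 -- {50,90} nests inside {0,100} but not {10,40}.
}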
