// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/debug/debug-coverage.h"

#include "src/ast/ast-source-ranges.h"
#include "src/ast/ast.h"
#include "src/base/hashmap.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate.h"
#include "src/objects/debug-objects-inl.h"
#include "src/objects/objects.h"

namespace v8 {
namespace internal {

class SharedToCounterMap
    : public base::TemplateHashMapImpl<SharedFunctionInfo, uint32_t,
                                       base::KeyEqualityMatcher<Object>,
                                       base::DefaultAllocationPolicy> {
 public:
  using Entry = base::TemplateHashMapEntry<SharedFunctionInfo, uint32_t>;
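  // Adds {count} to the accumulated count for {key}. The addition saturates
  // at UINT32_MAX instead of wrapping on overflow.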
  inline void Add(SharedFunctionInfo key, uint32_t count) {
    Entry* entry = LookupOrInsert(key, Hash(key), []() { return 0; });
    uint32_t old_count = entry->value;
    if (UINT32_MAX - count < old_count) {
      entry->value = UINT32_MAX;
    } else {
      entry->value = old_count + count;
    }
  }

  inline uint32_t Get(SharedFunctionInfo key) {
    Entry* entry = Lookup(key, Hash(key));
    if (entry == nullptr) return 0;
    return entry->value;
  }

 private:
  static uint32_t Hash(SharedFunctionInfo key) {
    return static_cast<uint32_t>(key.ptr());
  }

  DisallowHeapAllocation no_gc;
};

namespace {
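// Returns the start position of {info}, preferring the position of the
// 'function' token when one is available.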
int StartPosition(SharedFunctionInfo info) {
  int start = info.function_token_position();
  if (start == kNoSourcePosition) start = info.StartPosition();
  return start;
}

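// Compares blocks by ascending start position; ties are broken by descending
// end position, so that an enclosing block sorts before the blocks nested
// within it.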
bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
  DCHECK_NE(kNoSourcePosition, a.start);
  DCHECK_NE(kNoSourcePosition, b.start);
  if (a.start == b.start) return a.end > b.end;
  return a.start < b.start;
}

void SortBlockData(
    std::vector<CoverageBlock>& v) {  // NOLINT(runtime/references)
  // Sort according to the block nesting structure.
  std::sort(v.begin(), v.end(), CompareCoverageBlock);
}

std::vector<CoverageBlock> GetSortedBlockData(SharedFunctionInfo shared) {
  DCHECK(shared.HasCoverageInfo());

  CoverageInfo coverage_info =
      CoverageInfo::cast(shared.GetDebugInfo().coverage_info());

  std::vector<CoverageBlock> result;
  if (coverage_info.slot_count() == 0) return result;

  for (int i = 0; i < coverage_info.slot_count(); i++) {
    const int start_pos = coverage_info.StartSourcePosition(i);
    const int until_pos = coverage_info.EndSourcePosition(i);
    const int count = coverage_info.BlockCount(i);

    DCHECK_NE(kNoSourcePosition, start_pos);
    result.emplace_back(start_pos, until_pos, count);
  }

  SortBlockData(result);

  return result;
}

// A utility class to simplify logic for performing passes over block coverage
// ranges. Provides access to the implicit tree structure of ranges (i.e.
// access to parent and sibling blocks), and supports efficient in-place
// editing and deletion. The underlying backing store is the array of
// CoverageBlocks stored on the CoverageFunction.
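//
// Typical usage, as a sketch (ShouldKeep is a hypothetical predicate):
//
//   CoverageBlockIterator iter(function);
//   while (iter.Next()) {
//     if (!ShouldKeep(iter.GetBlock())) iter.DeleteBlock();
//   }
//
// Deleted blocks are compacted out of the backing array once iteration
// finishes (see Finalize(), called from the destructor).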
class CoverageBlockIterator final {
 public:
  explicit CoverageBlockIterator(CoverageFunction* function)
      : function_(function) {
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  ~CoverageBlockIterator() {
    Finalize();
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  bool HasNext() const {
    return read_index_ + 1 < static_cast<int>(function_->blocks.size());
  }

  bool Next() {
    if (!HasNext()) {
      if (!ended_) MaybeWriteCurrent();
      ended_ = true;
      return false;
    }

    // If a block has been deleted, subsequent iteration moves trailing blocks
    // to their updated position within the array.
    MaybeWriteCurrent();

    if (read_index_ == -1) {
      // Initialize the nesting stack with the function range.
      nesting_stack_.emplace_back(function_->start, function_->end,
                                  function_->count);
    } else if (!delete_current_) {
      nesting_stack_.emplace_back(GetBlock());
    }

    delete_current_ = false;
    read_index_++;

    DCHECK(IsActive());

    CoverageBlock& block = GetBlock();
    while (nesting_stack_.size() > 1 &&
           nesting_stack_.back().end <= block.start) {
      nesting_stack_.pop_back();
    }

    DCHECK_IMPLIES(block.start >= function_->end,
                   block.end == kNoSourcePosition);
    DCHECK_NE(block.start, kNoSourcePosition);
    DCHECK_LE(block.end, GetParent().end);

    return true;
  }

  CoverageBlock& GetBlock() {
    DCHECK(IsActive());
    return function_->blocks[read_index_];
  }

  CoverageBlock& GetNextBlock() {
    DCHECK(IsActive());
    DCHECK(HasNext());
    return function_->blocks[read_index_ + 1];
  }

  CoverageBlock& GetPreviousBlock() {
    DCHECK(IsActive());
    DCHECK_GT(read_index_, 0);
    return function_->blocks[read_index_ - 1];
  }

  CoverageBlock& GetParent() {
    DCHECK(IsActive());
    return nesting_stack_.back();
  }

  bool HasSiblingOrChild() {
    DCHECK(IsActive());
    return HasNext() && GetNextBlock().start < GetParent().end;
  }

  CoverageBlock& GetSiblingOrChild() {
    DCHECK(HasSiblingOrChild());
    DCHECK(IsActive());
    return GetNextBlock();
  }

  // A range is considered to be at top level if its parent range is the
  // function range.
  bool IsTopLevel() const { return nesting_stack_.size() == 1; }

  void DeleteBlock() {
    DCHECK(!delete_current_);
    DCHECK(IsActive());
    delete_current_ = true;
  }

 private:
  void MaybeWriteCurrent() {
    if (delete_current_) return;
    if (read_index_ >= 0 && write_index_ != read_index_) {
      function_->blocks[write_index_] = function_->blocks[read_index_];
    }
    write_index_++;
  }

  void Finalize() {
    while (Next()) {
      // Just iterate to the end.
    }
    function_->blocks.resize(write_index_);
  }

  bool IsActive() const { return read_index_ >= 0 && !ended_; }

  CoverageFunction* function_;
  std::vector<CoverageBlock> nesting_stack_;
  bool ended_ = false;
  bool delete_current_ = false;
  int read_index_ = -1;
  int write_index_ = -1;
};

bool HaveSameSourceRange(const CoverageBlock& lhs, const CoverageBlock& rhs) {
  return lhs.start == rhs.start && lhs.end == rhs.end;
}

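// Merges blocks covering exactly the same source range into one, keeping the
// maximum of their counts.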
void MergeDuplicateRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next() && iter.HasNext()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& next_block = iter.GetNextBlock();

    if (!HaveSameSourceRange(block, next_block)) continue;

    DCHECK_NE(kNoSourcePosition, block.end);  // Non-singleton range.
    next_block.count = std::max(block.count, next_block.count);
    iter.DeleteBlock();
  }
}

// Rewrite position singletons (produced by unconditional control flow
// like return statements, and by continuation counters) into source
// ranges that end at the next sibling range or the end of the parent
// range, whichever comes first.
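//
// For example (a sketch): given a parent range [0, 100) that contains a
// singleton at position 40 and a sibling range [60, 80), the singleton is
// rewritten to the range [40, 60); without the sibling it would extend to
// the parent's end instead.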
void RewritePositionSingletonsToRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (block.start >= function->end) {
      DCHECK_EQ(block.end, kNoSourcePosition);
      iter.DeleteBlock();
    } else if (block.end == kNoSourcePosition) {
      // The current block ends at the next sibling block (if it exists) or
      // the end of the parent block otherwise.
      if (iter.HasSiblingOrChild()) {
        block.end = iter.GetSiblingOrChild().start;
      } else if (iter.IsTopLevel()) {
        // See https://crbug.com/v8/6661. Functions are special-cased because
        // we never want the closing brace to be uncovered. This is mainly to
        // avoid a noisy UI.
        block.end = parent.end - 1;
      } else {
        block.end = parent.end;
      }
    }
  }
}

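// Merges a block into its immediately following sibling when the two ranges
// are adjacent and carry identical counts.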
void MergeConsecutiveRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();

    if (iter.HasSiblingOrChild()) {
      CoverageBlock& sibling = iter.GetSiblingOrChild();
      if (sibling.start == block.end && sibling.count == block.count) {
        // Best-effort: this pass may miss mergeable siblings in the presence
        // of child blocks.
        sibling.start = block.start;
        iter.DeleteBlock();
      }
    }
  }
}

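// Removes a block whose count equals its parent's count, since the parent
// range already conveys the same coverage information.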
void MergeNestedRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (parent.count == block.count) {
      // Transformation may not be valid if sibling blocks exist with a
      // differing count.
      iter.DeleteBlock();
    }
  }
}

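// Moves the count of the function-scope block (if one exists) into
// CoverageFunction::count and removes that block from the block list.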
void RewriteFunctionScopeCounter(CoverageFunction* function) {
  // Every function must have at least the top-level function counter.
  DCHECK(!function->blocks.empty());

  CoverageBlockIterator iter(function);
  if (iter.Next()) {
    DCHECK(iter.IsTopLevel());

    CoverageBlock& block = iter.GetBlock();
    if (block.start == SourceRange::kFunctionLiteralSourcePosition &&
        block.end == SourceRange::kFunctionLiteralSourcePosition) {
      // If a function-scope block exists, overwrite the function count. It
      // has a more reliable count than what we get from the FeedbackVector
      // (which is imprecise e.g. for generator functions and optimized code).
      function->count = block.count;

      // Then delete it; for compatibility with non-block coverage modes, the
      // function-scope block is expected in CoverageFunction, not as a
      // CoverageBlock.
      iter.DeleteBlock();
    }
  }
}

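// Removes singletons that alias the start position of an existing full
// range; see the detailed rationale at the call site in
// CollectBlockCoverageInternal.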
void FilterAliasedSingletons(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  iter.Next();  // Advance once since we reference the previous block later.

  while (iter.Next()) {
    CoverageBlock& previous_block = iter.GetPreviousBlock();
    CoverageBlock& block = iter.GetBlock();

    bool is_singleton = block.end == kNoSourcePosition;
    bool aliases_start = block.start == previous_block.start;

    if (is_singleton && aliases_start) {
      // The previous block must have a full range since duplicate singletons
      // have already been merged.
      DCHECK_NE(previous_block.end, kNoSourcePosition);
      // Likewise, the next block must have another start position since
      // singletons are sorted to the end.
      DCHECK_IMPLIES(iter.HasNext(), iter.GetNextBlock().start != block.start);
      iter.DeleteBlock();
    }
  }
}

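// Removes blocks with a zero count whose parent also has a zero count; the
// parent range already reports these as uncovered.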
void FilterUncoveredRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();
    if (block.count == 0 && parent.count == 0) iter.DeleteBlock();
  }
}

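// Removes blocks with an empty source range (start == end).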
void FilterEmptyRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.start == block.end) iter.DeleteBlock();
  }
}

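// Clamps all block counts to {0, 1} for binary coverage modes.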
void ClampToBinary(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.count > 0) block.count = 1;
  }
}

void ResetAllBlockCounts(SharedFunctionInfo shared) {
  DCHECK(shared.HasCoverageInfo());

  CoverageInfo coverage_info =
      CoverageInfo::cast(shared.GetDebugInfo().coverage_info());

  for (int i = 0; i < coverage_info.slot_count(); i++) {
    coverage_info.ResetBlockCount(i);
  }
}

bool IsBlockMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
      return true;
    default:
      return false;
  }
}

bool IsBinaryMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kPreciseBinary:
      return true;
    default:
      return false;
  }
}

void CollectBlockCoverageInternal(CoverageFunction* function,
                                  SharedFunctionInfo info,
                                  debug::CoverageMode mode) {
  DCHECK(IsBlockMode(mode));

  // Functions with empty source ranges are not interesting to report. This
  // can happen e.g. for internally-generated functions like class
  // constructors.
  if (!function->HasNonEmptySourceRange()) return;

  function->has_block_coverage = true;
  function->blocks = GetSortedBlockData(info);

  // If in binary mode, only report counts of 0/1.
  if (mode == debug::CoverageMode::kBlockBinary) ClampToBinary(function);

  // To stay compatible with non-block coverage modes, the function-scope
  // count is expected to be in the CoverageFunction, not as part of its
  // blocks. This finds the function-scope counter, overwrites
  // CoverageFunction::count, and removes it from the block list.
  //
  // Important: Must be called before other transformation passes.
  RewriteFunctionScopeCounter(function);

  // Functions without blocks don't need to be processed further.
  if (!function->HasBlocks()) return;

  // Remove singleton ranges with the same start position as a full range and
  // throw away their counts.
  // Singleton ranges are only intended to split existing full ranges and
  // should never expand into a full range. Consider
  // 'if (cond) { ... } else { ... }' as a problematic example; if the
  // then-block produces a continuation singleton, it would incorrectly
  // expand into the else range.
  // For more context, see https://crbug.com/v8/8237.
  FilterAliasedSingletons(function);

  // Rewrite all singletons (created e.g. by continuations and unconditional
  // control flow) to ranges.
  RewritePositionSingletonsToRanges(function);

  // Merge nested and consecutive ranges with identical counts.
  // Note that it's necessary to merge duplicate ranges prior to merging
  // nested ranges in order to avoid invalid transformations.
  // See crbug.com/827530.
  MergeConsecutiveRanges(function);

  SortBlockData(function->blocks);
  MergeDuplicateRanges(function);
  MergeNestedRanges(function);

  MergeConsecutiveRanges(function);

  // Filter out ranges with count == 0 unless the immediate parent range has
  // a count != 0.
  FilterUncoveredRanges(function);

  // Filter out ranges of zero length.
  FilterEmptyRanges(function);
}

void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
                          debug::CoverageMode mode) {
  CollectBlockCoverageInternal(function, info, mode);

  // Reset all counters on the DebugInfo to zero.
  ResetAllBlockCounts(info);
}

void PrintBlockCoverage(const CoverageFunction* function,
                        SharedFunctionInfo info,
                        bool has_nonempty_source_range,
                        bool function_is_relevant) {
  DCHECK(FLAG_trace_block_coverage);
  std::unique_ptr<char[]> function_name =
      function->name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
  i::PrintF(
      "Coverage for function='%s', SFI=%p, has_nonempty_source_range=%d, "
      "function_is_relevant=%d\n",
      function_name.get(), reinterpret_cast<void*>(info.ptr()),
      has_nonempty_source_range, function_is_relevant);
  i::PrintF("{start: %d, end: %d, count: %d}\n", function->start,
            function->end, function->count);
  for (const auto& block : function->blocks) {
    i::PrintF("{start: %d, end: %d, count: %d}\n", block.start, block.end,
              block.count);
  }
}

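// Collects invocation counts for all functions into {counter_map}. Unless
// collecting in best-effort mode, invocation counts on the feedback vectors
// are reset after being read.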
void CollectAndMaybeResetCounts(Isolate* isolate,
                                SharedToCounterMap* counter_map,
                                v8::debug::CoverageMode coverage_mode) {
  const bool reset_count =
      coverage_mode != v8::debug::CoverageMode::kBestEffort;

  switch (isolate->code_coverage_mode()) {
    case v8::debug::CoverageMode::kBlockBinary:
    case v8::debug::CoverageMode::kBlockCount:
    case v8::debug::CoverageMode::kPreciseBinary:
    case v8::debug::CoverageMode::kPreciseCount: {
      // Feedback vectors are already listed to prevent losing them to GC.
      DCHECK(isolate->factory()
                 ->feedback_vectors_for_profiling_tools()
                 ->IsArrayList());
      Handle<ArrayList> list = Handle<ArrayList>::cast(
          isolate->factory()->feedback_vectors_for_profiling_tools());
      for (int i = 0; i < list->Length(); i++) {
        FeedbackVector vector = FeedbackVector::cast(list->Get(i));
        SharedFunctionInfo shared = vector.shared_function_info();
        DCHECK(shared.IsSubjectToDebugging());
        uint32_t count = static_cast<uint32_t>(vector.invocation_count());
        if (reset_count) vector.clear_invocation_count();
        counter_map->Add(shared, count);
      }
      break;
    }
    case v8::debug::CoverageMode::kBestEffort: {
      DCHECK(!isolate->factory()
                  ->feedback_vectors_for_profiling_tools()
                  ->IsArrayList());
      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
      HeapObjectIterator heap_iterator(isolate->heap());
      for (HeapObject current_obj = heap_iterator.Next();
           !current_obj.is_null(); current_obj = heap_iterator.Next()) {
        if (!current_obj.IsJSFunction()) continue;
        JSFunction func = JSFunction::cast(current_obj);
        SharedFunctionInfo shared = func.shared();
        if (!shared.IsSubjectToDebugging()) continue;
        if (!(func.has_feedback_vector() ||
              func.has_closure_feedback_cell_array())) {
          continue;
        }
        uint32_t count = 0;
        if (func.has_feedback_vector()) {
          count =
              static_cast<uint32_t>(func.feedback_vector().invocation_count());
        } else if (func.raw_feedback_cell().interrupt_budget() <
                   FLAG_budget_for_feedback_vector_allocation) {
          // We haven't allocated a feedback vector, but the function has been
          // executed at least once. We don't have a precise invocation count
          // here.
          count = 1;
        }
        counter_map->Add(shared, count);
      }

      // Also check functions on the stack to collect the count map. With
      // lazy feedback allocation we may miss counting functions if the
      // feedback vector wasn't allocated yet and the function's interrupt
      // budget wasn't updated (i.e. it didn't execute return / jump).
      for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
        SharedFunctionInfo shared = it.frame()->function().shared();
        if (counter_map->Get(shared) != 0) continue;
        counter_map->Add(shared, 1);
      }
      break;
    }
  }
}

// A {SFI, count} tuple is used to sort by source range (stored on
// the SFI) and call count (in the counter map).
struct SharedFunctionInfoAndCount {
  SharedFunctionInfoAndCount(SharedFunctionInfo info, uint32_t count)
      : info(info),
        count(count),
        start(StartPosition(info)),
        end(info.EndPosition()) {}

  // Sort by:
  // - start, ascending.
  // - end, descending.
  // - info.is_toplevel() first.
  // - count, descending.
  bool operator<(const SharedFunctionInfoAndCount& that) const {
    if (this->start != that.start) return this->start < that.start;
    if (this->end != that.end) return this->end > that.end;
    if (this->info.is_toplevel() != that.info.is_toplevel()) {
      return this->info.is_toplevel();
    }
    return this->count > that.count;
  }

  SharedFunctionInfo info;
  uint32_t count;
  int start;
  int end;
};

}  // anonymous namespace

std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
  DCHECK(!isolate->is_best_effort_code_coverage());
  std::unique_ptr<Coverage> result =
      Collect(isolate, isolate->code_coverage_mode());
  if (!isolate->is_collecting_type_profile() &&
      (isolate->is_precise_binary_code_coverage() ||
       isolate->is_block_binary_code_coverage())) {
    // We do not have to hold onto feedback vectors for invocations we already
    // reported. So we can reset the list.
    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
  }
  return result;
}

std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
}

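// Collects coverage for all user scripts in the isolate: a call count per
// function, plus block-level data when a block coverage mode is active.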
std::unique_ptr<Coverage> Coverage::Collect(
    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
  // Collect call counts for all functions.
  SharedToCounterMap counter_map;
  CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);

  // Iterate shared function infos of every script and build a mapping
  // between source ranges and invocation counts.
  std::unique_ptr<Coverage> result(new Coverage());
  Script::Iterator scripts(isolate);
  for (Script script = scripts.Next(); !script.is_null();
       script = scripts.Next()) {
    if (!script.IsUserJavaScript()) continue;

    // Create and add new script data.
    Handle<Script> script_handle(script, isolate);
    result->emplace_back(script_handle);
    std::vector<CoverageFunction>* functions = &result->back().functions;

    std::vector<SharedFunctionInfoAndCount> sorted;

    {
      // Sort functions by start position, from outer to inner functions.
      SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
      for (SharedFunctionInfo info = infos.Next(); !info.is_null();
           info = infos.Next()) {
        sorted.emplace_back(info, counter_map.Get(info));
      }
      std::sort(sorted.begin(), sorted.end());
    }

    // Stack to track nested functions, referring function by index.
    std::vector<size_t> nesting;

    // Use sorted list to reconstruct function nesting.
    for (const SharedFunctionInfoAndCount& v : sorted) {
      SharedFunctionInfo info = v.info;
      int start = v.start;
      int end = v.end;
      uint32_t count = v.count;

      // Find the correct outer function based on start position.
      //
      // This is, in general, not robust when considering two functions with
      // identical source ranges; then the notion of inner and outer is
      // unclear.
      // Identical source ranges arise when the source range of the top-most
      // entity (e.g. a function) in the script is identical to the whole
      // script, e.g. <script>function foo() {}</script>. The script has its
      // own shared function info, which has the same source range as the SFI
      // for `foo`.
      // Node.js creates an additional wrapper for scripts (again with
      // identical source range) and those wrappers will have a call count of
      // zero even if the wrapped script was executed (see v8:9212). We
      // mitigate this issue by sorting top-level SFIs first among SFIs with
      // the same source range: This ensures top-level SFIs are processed
      // first. If a top-level SFI has a non-zero call count, it gets recorded
      // due to `function_is_relevant` below (e.g. script wrappers), while
      // top-level SFIs with zero call count do not get reported (this ensures
      // node's extra wrappers do not get reported). If two SFIs with
      // identical source ranges get reported, we report them in decreasing
      // order of call count, as in all known cases this corresponds to the
      // nesting order. In the case of the script tag example above, we report
      // the zero call count of `foo` last. As it turns out, embedders started
      // to rely on functions being reported in nesting order.
      // TODO(jgruber): Investigate whether it is possible to remove node's
      // extra top-level wrapper script, or change its source range, or ensure
      // that it follows the invariant that nesting order is descending count
      // order for SFIs with identical source ranges.
      while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
        nesting.pop_back();
      }

      if (count != 0) {
        switch (collectionMode) {
          case v8::debug::CoverageMode::kBlockCount:
          case v8::debug::CoverageMode::kPreciseCount:
            break;
          case v8::debug::CoverageMode::kBlockBinary:
          case v8::debug::CoverageMode::kPreciseBinary:
            count = info.has_reported_binary_coverage() ? 0 : 1;
            info.set_has_reported_binary_coverage(true);
            break;
          case v8::debug::CoverageMode::kBestEffort:
            count = 1;
            break;
        }
      }

      Handle<String> name(info.DebugName(), isolate);
      CoverageFunction function(start, end, count, name);

      if (IsBlockMode(collectionMode) && info.HasCoverageInfo()) {
        CollectBlockCoverage(&function, info, collectionMode);
      }

      // Only include a function range if the function itself or its parent
      // is covered, or if it contains non-trivial block coverage.
      bool is_covered = (count != 0);
      bool parent_is_covered =
          (!nesting.empty() && functions->at(nesting.back()).count != 0);
      bool has_block_coverage = !function.blocks.empty();
      bool function_is_relevant =
          (is_covered || parent_is_covered || has_block_coverage);

      // It must also have a non-empty source range (otherwise it is not
      // interesting to report).
      bool has_nonempty_source_range = function.HasNonEmptySourceRange();

      if (has_nonempty_source_range && function_is_relevant) {
        nesting.push_back(functions->size());
        functions->emplace_back(function);
      }

      if (FLAG_trace_block_coverage) {
        PrintBlockCoverage(&function, info, has_nonempty_source_range,
                           function_is_relevant);
      }
    }

    // Remove entries for scripts that have no coverage.
    if (functions->empty()) result->pop_back();
  }
  return result;
}

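// Switches the isolate to the given coverage mode, preparing heap state
// (deoptimizing code, allocating feedback vectors) as required by the mode.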
void Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) {
  if (mode != isolate->code_coverage_mode()) {
    // Changing the coverage mode can change the bytecode that would be
    // generated for a function, which can interfere with lazy source
    // positions, so just force source position collection whenever there's
    // such a change.
    isolate->CollectSourcePositionsForAllBytecodeArrays();
  }

  switch (mode) {
    case debug::CoverageMode::kBestEffort:
      // Note that DevTools switches back to best-effort coverage once the
      // recording is stopped. Since we delete coverage infos at that point,
      // any following coverage recording (without reloads) will be at
      // function granularity.
      isolate->debug()->RemoveAllCoverageInfos();
      if (!isolate->is_collecting_type_profile()) {
        isolate->SetFeedbackVectorsForProfilingTools(
            ReadOnlyRoots(isolate).undefined_value());
      }
      break;
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
    case debug::CoverageMode::kPreciseBinary:
    case debug::CoverageMode::kPreciseCount: {
      HandleScope scope(isolate);

      // Remove all optimized functions. Optimized and inlined functions do
      // not increment the invocation count.
      Deoptimizer::DeoptimizeAll(isolate);

      std::vector<Handle<JSFunction>> funcs_needing_feedback_vector;
      {
        HeapObjectIterator heap_iterator(isolate->heap());
        for (HeapObject o = heap_iterator.Next(); !o.is_null();
             o = heap_iterator.Next()) {
          if (o.IsJSFunction()) {
            JSFunction func = JSFunction::cast(o);
            if (func.has_closure_feedback_cell_array()) {
              funcs_needing_feedback_vector.push_back(
                  Handle<JSFunction>(func, isolate));
            }
          } else if (IsBinaryMode(mode) && o.IsSharedFunctionInfo()) {
            // If collecting binary coverage, reset
            // SFI::has_reported_binary_coverage to avoid optimizing /
            // inlining functions before they have reported coverage.
            SharedFunctionInfo shared = SharedFunctionInfo::cast(o);
            shared.set_has_reported_binary_coverage(false);
          } else if (o.IsFeedbackVector()) {
            // In any case, clear any collected invocation counts.
            FeedbackVector::cast(o).clear_invocation_count();
          }
        }
      }

      for (Handle<JSFunction> func : funcs_needing_feedback_vector) {
        IsCompiledScope is_compiled_scope(
            func->shared().is_compiled_scope(isolate));
        CHECK(is_compiled_scope.is_compiled());
        JSFunction::EnsureFeedbackVector(func, &is_compiled_scope);
      }

      // Root all feedback vectors to avoid early collection.
      isolate->MaybeInitializeVectorListFromHeap();

      break;
    }
  }
  isolate->set_code_coverage_mode(mode);
}

}  // namespace internal
}  // namespace v8